} */
let marker
return start
/**
* Start of thematic break.
*
* ```markdown
* > | ***
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('thematicBreak')
// To do: parse indent like `markdown-rs`.
return before(code)
}
/**
* After optional whitespace, at marker.
*
* ```markdown
* > | ***
* ^
* ```
*
* @type {State}
*/
function before(code) {
marker = code
return atBreak(code)
}
/**
* After something, before something else.
*
* ```markdown
* > | ***
* ^
* ```
*
* @type {State}
*/
function atBreak(code) {
if (code === marker) {
effects.enter('thematicBreakSequence')
return sequence(code)
}
if (size >= 3 && (code === null || markdownLineEnding(code))) {
effects.exit('thematicBreak')
return ok(code)
}
return nok(code)
}
/**
* In sequence.
*
* ```markdown
* > | ***
* ^
* ```
*
* @type {State}
*/
function sequence(code) {
if (code === marker) {
effects.consume(code)
size++
return sequence
}
effects.exit('thematicBreakSequence')
return markdownSpace(code)
? factorySpace(effects, atBreak, 'whitespace')(code)
: atBreak(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/list.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').ContainerState} ContainerState
* @typedef {import('micromark-util-types').Exiter} Exiter
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const list = {
name: 'list',
tokenize: tokenizeListStart,
continuation: {
tokenize: tokenizeListContinuation
},
exit: tokenizeListEnd
}
/** @type {Construct} */
const listItemPrefixWhitespaceConstruct = {
tokenize: tokenizeListItemPrefixWhitespace,
partial: true
}
/** @type {Construct} */
const indentConstruct = {
tokenize: tokenizeIndent,
partial: true
}
// To do: `markdown-rs` parses list items on their own and later stitches them
// together.
/**
* @type {Tokenizer}
* @this {TokenizeContext}
*/
function tokenizeListStart(effects, ok, nok) {
const self = this
const tail = self.events[self.events.length - 1]
let initialSize =
tail && tail[1].type === 'linePrefix'
? tail[2].sliceSerialize(tail[1], true).length
: 0
let size = 0
return start
/** @type {State} */
function start(code) {
const kind =
self.containerState.type ||
(code === 42 || code === 43 || code === 45
? 'listUnordered'
: 'listOrdered')
if (
kind === 'listUnordered'
? !self.containerState.marker || code === self.containerState.marker
: asciiDigit(code)
) {
if (!self.containerState.type) {
self.containerState.type = kind
effects.enter(kind, {
_container: true
})
}
if (kind === 'listUnordered') {
effects.enter('listItemPrefix')
return code === 42 || code === 45
? effects.check(thematicBreak, nok, atMarker)(code)
: atMarker(code)
}
if (!self.interrupt || code === 49) {
effects.enter('listItemPrefix')
effects.enter('listItemValue')
return inside(code)
}
}
return nok(code)
}
/** @type {State} */
function inside(code) {
if (asciiDigit(code) && ++size < 10) {
effects.consume(code)
return inside
}
if (
(!self.interrupt || size < 2) &&
(self.containerState.marker
? code === self.containerState.marker
: code === 41 || code === 46)
) {
effects.exit('listItemValue')
return atMarker(code)
}
return nok(code)
}
/**
* @type {State}
**/
function atMarker(code) {
effects.enter('listItemMarker')
effects.consume(code)
effects.exit('listItemMarker')
self.containerState.marker = self.containerState.marker || code
return effects.check(
blankLine,
// Can’t be empty when interrupting.
self.interrupt ? nok : onBlank,
effects.attempt(
listItemPrefixWhitespaceConstruct,
endOfPrefix,
otherPrefix
)
)
}
/** @type {State} */
function onBlank(code) {
self.containerState.initialBlankLine = true
initialSize++
return endOfPrefix(code)
}
/** @type {State} */
function otherPrefix(code) {
if (markdownSpace(code)) {
effects.enter('listItemPrefixWhitespace')
effects.consume(code)
effects.exit('listItemPrefixWhitespace')
return endOfPrefix
}
return nok(code)
}
/** @type {State} */
function endOfPrefix(code) {
self.containerState.size =
initialSize +
self.sliceSerialize(effects.exit('listItemPrefix'), true).length
return ok(code)
}
}
/**
* @type {Tokenizer}
* @this {TokenizeContext}
*/
function tokenizeListContinuation(effects, ok, nok) {
const self = this
self.containerState._closeFlow = undefined
return effects.check(blankLine, onBlank, notBlank)
/** @type {State} */
function onBlank(code) {
self.containerState.furtherBlankLines =
self.containerState.furtherBlankLines ||
self.containerState.initialBlankLine
// We have a blank line.
// Still, try to consume at most the items size.
return factorySpace(
effects,
ok,
'listItemIndent',
self.containerState.size + 1
)(code)
}
/** @type {State} */
function notBlank(code) {
if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
self.containerState.furtherBlankLines = undefined
self.containerState.initialBlankLine = undefined
return notInCurrentItem(code)
}
self.containerState.furtherBlankLines = undefined
self.containerState.initialBlankLine = undefined
return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)
}
/** @type {State} */
function notInCurrentItem(code) {
// While we do continue, we signal that the flow should be closed.
self.containerState._closeFlow = true
// As we’re closing flow, we’re no longer interrupting.
self.interrupt = undefined
// Always populated by defaults.
return factorySpace(
effects,
effects.attempt(list, ok, nok),
'linePrefix',
self.parser.constructs.disable.null.includes('codeIndented')
? undefined
: 4
)(code)
}
}
/**
* @type {Tokenizer}
* @this {TokenizeContext}
*/
function tokenizeIndent(effects, ok, nok) {
const self = this
return factorySpace(
effects,
afterPrefix,
'listItemIndent',
self.containerState.size + 1
)
/** @type {State} */
function afterPrefix(code) {
const tail = self.events[self.events.length - 1]
return tail &&
tail[1].type === 'listItemIndent' &&
tail[2].sliceSerialize(tail[1], true).length === self.containerState.size
? ok(code)
: nok(code)
}
}
/**
* @type {Exiter}
* @this {TokenizeContext}
*/
function tokenizeListEnd(effects) {
effects.exit(this.containerState.type)
}
/**
* @type {Tokenizer}
* @this {TokenizeContext}
*/
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
const self = this
// Always populated by defaults.
return factorySpace(
effects,
afterPrefix,
'listItemPrefixWhitespace',
self.parser.constructs.disable.null.includes('codeIndented')
? undefined
: 4 + 1
)
/** @type {State} */
function afterPrefix(code) {
const tail = self.events[self.events.length - 1]
return !markdownSpace(code) &&
tail &&
tail[1].type === 'listItemPrefixWhitespace'
? ok(code)
: nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/block-quote.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Exiter} Exiter
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const blockQuote = {
name: 'blockQuote',
tokenize: tokenizeBlockQuoteStart,
continuation: {
tokenize: tokenizeBlockQuoteContinuation
},
exit
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeBlockQuoteStart(effects, ok, nok) {
const self = this
return start
/**
* Start of block quote.
*
* ```markdown
* > | > a
* ^
* ```
*
* @type {State}
*/
function start(code) {
if (code === 62) {
const state = self.containerState
if (!state.open) {
effects.enter('blockQuote', {
_container: true
})
state.open = true
}
effects.enter('blockQuotePrefix')
effects.enter('blockQuoteMarker')
effects.consume(code)
effects.exit('blockQuoteMarker')
return after
}
return nok(code)
}
/**
* After `>`, before optional whitespace.
*
* ```markdown
* > | > a
* ^
* ```
*
* @type {State}
*/
function after(code) {
if (markdownSpace(code)) {
effects.enter('blockQuotePrefixWhitespace')
effects.consume(code)
effects.exit('blockQuotePrefixWhitespace')
effects.exit('blockQuotePrefix')
return ok
}
effects.exit('blockQuotePrefix')
return ok(code)
}
}
/**
* Start of block quote continuation.
*
* ```markdown
* | > a
* > | > b
* ^
* ```
*
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
const self = this
return contStart
/**
* Start of block quote continuation.
*
* Also used to parse the first block quote opening.
*
* ```markdown
* | > a
* > | > b
* ^
* ```
*
* @type {State}
*/
function contStart(code) {
if (markdownSpace(code)) {
// Always populated by defaults.
return factorySpace(
effects,
contBefore,
'linePrefix',
self.parser.constructs.disable.null.includes('codeIndented')
? undefined
: 4
)(code)
}
return contBefore(code)
}
/**
* At `>`, after optional whitespace.
*
* Also used to parse the first block quote opening.
*
* ```markdown
* | > a
* > | > b
* ^
* ```
*
* @type {State}
*/
function contBefore(code) {
return effects.attempt(blockQuote, ok, nok)(code)
}
}
/** @type {Exiter} */
function exit(effects) {
effects.exit('blockQuote')
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-factory-destination/index.js
/**
* @typedef {import('micromark-util-types').Effects} Effects
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenType} TokenType
*/
/**
* Parse destinations.
*
* ###### Examples
*
* ```markdown
* <a>
* <a\>b>
* <a b>
* <a)b>
* a
* a\)b
* a(b)c
* a(b)
* ```
*
* @param {Effects} effects
* Context.
* @param {State} ok
* State switched to when successful.
* @param {State} nok
* State switched to when unsuccessful.
* @param {TokenType} type
* Type for whole (`<a>` or `b`).
* @param {TokenType} literalType
* Type when enclosed (`<a>`).
* @param {TokenType} literalMarkerType
* Type for enclosing (`<` and `>`).
* @param {TokenType} rawType
* Type when not enclosed (`b`).
* @param {TokenType} stringType
* Type for the value (`a` or `b`).
* @param {number | undefined} [max=Infinity]
* Depth of nested parens (inclusive).
* @returns {State}
* Start state.
*/ // eslint-disable-next-line max-params
function factoryDestination(
effects,
ok,
nok,
type,
literalType,
literalMarkerType,
rawType,
stringType,
max
) {
const limit = max || Number.POSITIVE_INFINITY
let balance = 0
return start
/**
* Start of destination.
*
* ```markdown
* > | <aa>
* ^
* > | aa
* ^
* ```
*
* @type {State}
*/
function start(code) {
if (code === 60) {
effects.enter(type)
effects.enter(literalType)
effects.enter(literalMarkerType)
effects.consume(code)
effects.exit(literalMarkerType)
return enclosedBefore
}
// ASCII control, space, closing paren.
if (code === null || code === 32 || code === 41 || asciiControl(code)) {
return nok(code)
}
effects.enter(type)
effects.enter(rawType)
effects.enter(stringType)
effects.enter('chunkString', {
contentType: 'string'
})
return raw(code)
}
/**
* After `<`, at an enclosed destination.
*
* ```markdown
* > | <aa>
* ^
* ```
*
* @type {State}
*/
function enclosedBefore(code) {
if (code === 62) {
effects.enter(literalMarkerType)
effects.consume(code)
effects.exit(literalMarkerType)
effects.exit(literalType)
effects.exit(type)
return ok
}
effects.enter(stringType)
effects.enter('chunkString', {
contentType: 'string'
})
return enclosed(code)
}
/**
* In enclosed destination.
*
* ```markdown
* > | <aa>
* ^
* ```
*
* @type {State}
*/
function enclosed(code) {
if (code === 62) {
effects.exit('chunkString')
effects.exit(stringType)
return enclosedBefore(code)
}
if (code === null || code === 60 || markdownLineEnding(code)) {
return nok(code)
}
effects.consume(code)
return code === 92 ? enclosedEscape : enclosed
}
/**
* After `\`, at a special character.
*
* ```markdown
* > | <a\*a>
* ^
* ```
*
* @type {State}
*/
function enclosedEscape(code) {
if (code === 60 || code === 62 || code === 92) {
effects.consume(code)
return enclosed
}
return enclosed(code)
}
/**
* In raw destination.
*
* ```markdown
* > | aa
* ^
* ```
*
* @type {State}
*/
function raw(code) {
if (
!balance &&
(code === null || code === 41 || markdownLineEndingOrSpace(code))
) {
effects.exit('chunkString')
effects.exit(stringType)
effects.exit(rawType)
effects.exit(type)
return ok(code)
}
if (balance < limit && code === 40) {
effects.consume(code)
balance++
return raw
}
if (code === 41) {
effects.consume(code)
balance--
return raw
}
// ASCII control (but *not* `\0`) and space and `(`.
// Note: in `markdown-rs`, `\0` exists in codes, in `micromark-js` it
// doesn’t.
if (code === null || code === 32 || code === 40 || asciiControl(code)) {
return nok(code)
}
effects.consume(code)
return code === 92 ? rawEscape : raw
}
/**
* After `\`, at special character.
*
* ```markdown
* > | a\*a
* ^
* ```
*
* @type {State}
*/
function rawEscape(code) {
if (code === 40 || code === 41 || code === 92) {
effects.consume(code)
return raw
}
return raw(code)
}
}
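// Illustrative only, not part of the original bundle: a minimal sketch of the
// call pattern for `factoryDestination`, mirroring how `destinationBefore` in
// the bundled `definition.js` below wires it up. `exampleDestinationStart`,
// `exampleOk`, and `exampleNok` are hypothetical names.
function exampleDestinationStart(effects, exampleOk, exampleNok) {
  return function (code) {
    // Hand the current code off to the factory; it returns the start state.
    return factoryDestination(
      effects,
      exampleOk, // Switched to after a full destination.
      exampleNok, // Switched to when this is not a destination.
      'definitionDestination',
      'definitionDestinationLiteral',
      'definitionDestinationLiteralMarker',
      'definitionDestinationRaw',
      'definitionDestinationString'
    )(code)
  }
}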
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-factory-label/index.js
/**
* @typedef {import('micromark-util-types').Effects} Effects
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').TokenType} TokenType
*/
/**
* Parse labels.
*
* > 👉 **Note**: labels in markdown are capped at 999 characters in the string.
*
* ###### Examples
*
* ```markdown
* [a]
* [a
* b]
* [a\]b]
* ```
*
* @this {TokenizeContext}
* Tokenize context.
* @param {Effects} effects
* Context.
* @param {State} ok
* State switched to when successful.
* @param {State} nok
* State switched to when unsuccessful.
* @param {TokenType} type
* Type of the whole label (`[a]`).
* @param {TokenType} markerType
* Type for the markers (`[` and `]`).
* @param {TokenType} stringType
* Type for the identifier (`a`).
* @returns {State}
* Start state.
*/ // eslint-disable-next-line max-params
function factoryLabel(effects, ok, nok, type, markerType, stringType) {
const self = this
let size = 0
/** @type {boolean} */
let seen
return start
/**
* Start of label.
*
* ```markdown
* > | [a]
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter(type)
effects.enter(markerType)
effects.consume(code)
effects.exit(markerType)
effects.enter(stringType)
return atBreak
}
/**
* In label, at something, before something else.
*
* ```markdown
* > | [a]
* ^
* ```
*
* @type {State}
*/
function atBreak(code) {
if (
size > 999 ||
code === null ||
code === 91 ||
(code === 93 && !seen) ||
// To do: remove in the future once we’ve switched from
// `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
// which doesn’t need this.
// Hidden footnotes hook.
/* c8 ignore next 3 */
(code === 94 &&
!size &&
'_hiddenFootnoteSupport' in self.parser.constructs)
) {
return nok(code)
}
if (code === 93) {
effects.exit(stringType)
effects.enter(markerType)
effects.consume(code)
effects.exit(markerType)
effects.exit(type)
return ok
}
// To do: indent? Link chunks and EOLs together?
if (markdownLineEnding(code)) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return atBreak
}
effects.enter('chunkString', {
contentType: 'string'
})
return labelInside(code)
}
/**
* In label, in text.
*
* ```markdown
* > | [a]
* ^
* ```
*
* @type {State}
*/
function labelInside(code) {
if (
code === null ||
code === 91 ||
code === 93 ||
markdownLineEnding(code) ||
size++ > 999
) {
effects.exit('chunkString')
return atBreak(code)
}
effects.consume(code)
if (!seen) seen = !markdownSpace(code)
return code === 92 ? labelEscape : labelInside
}
/**
* After `\`, at a special character.
*
* ```markdown
* > | [a\*a]
* ^
* ```
*
* @type {State}
*/
function labelEscape(code) {
if (code === 91 || code === 92 || code === 93) {
effects.consume(code)
size++
return labelInside
}
return labelInside(code)
}
}
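// Illustrative only, not part of the original bundle: a hedged sketch of how
// `factoryLabel` is typically invoked, mirroring `before` in the bundled
// `definition.js` below. Note the `.call(self, …)` so the factory can read
// `self.parser.constructs`; `exampleLabelStart` and friends are hypothetical.
function exampleLabelStart(self, effects, exampleOk, exampleNok) {
  return function (code) {
    return factoryLabel.call(
      self,
      effects,
      exampleOk,
      exampleNok,
      'definitionLabel',
      'definitionLabelMarker',
      'definitionLabelString'
    )(code)
  }
}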
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-factory-title/index.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Effects} Effects
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenType} TokenType
*/
/**
* Parse titles.
*
* ###### Examples
*
* ```markdown
* "a"
* 'b'
* (c)
* "a
* b"
* 'a
* b'
* (a\)b)
* ```
*
* @param {Effects} effects
* Context.
* @param {State} ok
* State switched to when successful.
* @param {State} nok
* State switched to when unsuccessful.
* @param {TokenType} type
* Type of the whole title (`"a"`, `'b'`, `(c)`).
* @param {TokenType} markerType
* Type for the markers (`"`, `'`, `(`, and `)`).
* @param {TokenType} stringType
* Type for the value (`a`).
* @returns {State}
* Start state.
*/ // eslint-disable-next-line max-params
function factoryTitle(effects, ok, nok, type, markerType, stringType) {
/** @type {NonNullable<Code>} */
let marker
return start
/**
* Start of title.
*
* ```markdown
* > | "a"
* ^
* ```
*
* @type {State}
*/
function start(code) {
if (code === 34 || code === 39 || code === 40) {
effects.enter(type)
effects.enter(markerType)
effects.consume(code)
effects.exit(markerType)
marker = code === 40 ? 41 : code
return begin
}
return nok(code)
}
/**
* After opening marker.
*
* This is also used at the closing marker.
*
* ```markdown
* > | "a"
* ^
* ```
*
* @type {State}
*/
function begin(code) {
if (code === marker) {
effects.enter(markerType)
effects.consume(code)
effects.exit(markerType)
effects.exit(type)
return ok
}
effects.enter(stringType)
return atBreak(code)
}
/**
* At something, before something else.
*
* ```markdown
* > | "a"
* ^
* ```
*
* @type {State}
*/
function atBreak(code) {
if (code === marker) {
effects.exit(stringType)
return begin(marker)
}
if (code === null) {
return nok(code)
}
// Note: blank lines can’t exist in content.
if (markdownLineEnding(code)) {
// To do: use `space_or_tab_eol_with_options`, connect.
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return factorySpace(effects, atBreak, 'linePrefix')
}
effects.enter('chunkString', {
contentType: 'string'
})
return inside(code)
}
/**
*
*
* @type {State}
*/
function inside(code) {
if (code === marker || code === null || markdownLineEnding(code)) {
effects.exit('chunkString')
return atBreak(code)
}
effects.consume(code)
return code === 92 ? escape : inside
}
/**
* After `\`, at a special character.
*
* ```markdown
* > | "a\*b"
* ^
* ```
*
* @type {State}
*/
function escape(code) {
if (code === marker || code === 92) {
effects.consume(code)
return inside
}
return inside(code)
}
}
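// Illustrative only, not part of the original bundle: a minimal sketch of the
// `factoryTitle` call pattern, mirroring `beforeMarker` in the bundled
// `definition.js` below; `exampleTitleStart` and friends are hypothetical.
function exampleTitleStart(effects, exampleOk, exampleNok) {
  return function (code) {
    return factoryTitle(
      effects,
      exampleOk,
      exampleNok,
      'definitionTitle',
      'definitionTitleMarker',
      'definitionTitleString'
    )(code)
  }
}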
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-factory-whitespace/index.js
/**
* @typedef {import('micromark-util-types').Effects} Effects
* @typedef {import('micromark-util-types').State} State
*/
/**
* Parse spaces and tabs.
*
* There is no `nok` parameter:
*
* * line endings or spaces in markdown are often optional, in which case this
* factory can be used and `ok` will be switched to whether spaces were found
* or not
* * one line ending or space can be detected with
* `markdownLineEndingOrSpace(code)` right before using `factoryWhitespace`
*
* @param {Effects} effects
* Context.
* @param {State} ok
* State switched to when successful.
* @returns
* Start state.
*/
function factoryWhitespace(effects, ok) {
/** @type {boolean} */
let seen
return start
/** @type {State} */
function start(code) {
if (markdownLineEnding(code)) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
seen = true
return start
}
if (markdownSpace(code)) {
return factorySpace(
effects,
start,
seen ? 'linePrefix' : 'lineSuffix'
)(code)
}
return ok(code)
}
}
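// Illustrative only, not part of the original bundle: a hedged sketch of the
// “no `nok`” pattern described above — check for one line ending or space
// first, then let the factory consume the rest (this mirrors `markerAfter`
// in the bundled `definition.js` below). `exampleAfterMarker` is hypothetical.
function exampleAfterMarker(effects, next) {
  return function (code) {
    // Whitespace here is optional, so there is no failure state to switch to.
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, next)(code)
      : next(code)
  }
}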
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-util-normalize-identifier/index.js
/**
* Normalize an identifier (as found in references, definitions).
*
* Collapses markdown whitespace, trims, and then lower- and uppercases.
*
* Some characters are considered “uppercase”, such as U+03F4 (`ϴ`), but
* uppercasing their lowercase counterpart (U+03B8 (`θ`)) results in a different
* uppercase character (U+0398 (`Θ`)).
* So, to get a canonical form, we perform both lower- and uppercase.
*
* Using uppercase last makes sure keys will never interact with default
* prototypal values (such as `constructor`): nothing in the prototype of
* `Object` is uppercase.
*
* @param {string} value
* Identifier to normalize.
* @returns {string}
* Normalized identifier.
*/
function normalizeIdentifier(value) {
return (
value
// Collapse markdown whitespace.
.replace(/[\t\n\r ]+/g, ' ')
// Trim.
.replace(/^ | $/g, '')
// Some characters are considered “uppercase”, but uppercasing their lowercase
// counterpart results in a different uppercase character.
// Hence, to get that form, we perform both lower- and uppercase.
// Upper case makes sure keys will not interact with default prototypal
// methods: no method is uppercase.
.toLowerCase()
.toUpperCase()
)
}
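// Illustrative only, not part of the original bundle: a small sketch of the
// normalization described above; `exampleNormalizeIdentifierUsage` is a
// hypothetical helper.
function exampleNormalizeIdentifierUsage() {
  // Whitespace collapses and trims, then the double case-fold canonicalizes:
  // '  Foo\t\nbar ' -> 'Foo bar' -> 'foo bar' -> 'FOO BAR'.
  return normalizeIdentifier('  Foo\t\nbar ') === 'FOO BAR'
}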
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/definition.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const definition = {
name: 'definition',
tokenize: tokenizeDefinition
}
/** @type {Construct} */
const titleBefore = {
tokenize: tokenizeTitleBefore,
partial: true
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeDefinition(effects, ok, nok) {
const self = this
/** @type {string} */
let identifier
return start
/**
* At start of a definition.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function start(code) {
// Do not interrupt paragraphs (but do follow definitions).
// To do: do `interrupt` the way `markdown-rs` does.
// To do: parse whitespace the way `markdown-rs` does.
effects.enter('definition')
return before(code)
}
/**
* After optional whitespace, at `[`.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function before(code) {
// To do: parse whitespace the way `markdown-rs` does.
return factoryLabel.call(
self,
effects,
labelAfter,
// Note: we don’t need to reset the way `markdown-rs` does.
nok,
'definitionLabel',
'definitionLabelMarker',
'definitionLabelString'
)(code)
}
/**
* After label.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function labelAfter(code) {
identifier = normalizeIdentifier(
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
)
if (code === 58) {
effects.enter('definitionMarker')
effects.consume(code)
effects.exit('definitionMarker')
return markerAfter
}
return nok(code)
}
/**
* After marker.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function markerAfter(code) {
// Note: whitespace is optional.
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, destinationBefore)(code)
: destinationBefore(code)
}
/**
* Before destination.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function destinationBefore(code) {
return factoryDestination(
effects,
destinationAfter,
// Note: we don’t need to reset the way `markdown-rs` does.
nok,
'definitionDestination',
'definitionDestinationLiteral',
'definitionDestinationLiteralMarker',
'definitionDestinationRaw',
'definitionDestinationString'
)(code)
}
/**
* After destination.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function destinationAfter(code) {
return effects.attempt(titleBefore, after, after)(code)
}
/**
* After definition.
*
* ```markdown
* > | [a]: b
* ^
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function after(code) {
return markdownSpace(code)
? factorySpace(effects, afterWhitespace, 'whitespace')(code)
: afterWhitespace(code)
}
/**
* After definition, after optional whitespace.
*
* ```markdown
* > | [a]: b
* ^
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function afterWhitespace(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('definition')
// Note: we don’t care about uniqueness.
// It’s likely that that doesn’t happen very frequently.
// It is more likely that it wastes precious time.
self.parser.defined.push(identifier)
// To do: `markdown-rs` interrupt.
// // You’d be interrupting.
// tokenizer.interrupt = true
return ok(code)
}
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeTitleBefore(effects, ok, nok) {
return titleBefore
/**
* After destination, at whitespace.
*
* ```markdown
* > | [a]: b
* ^
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function titleBefore(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, beforeMarker)(code)
: nok(code)
}
/**
* At title.
*
* ```markdown
* | [a]: b
* > | "c"
* ^
* ```
*
* @type {State}
*/
function beforeMarker(code) {
return factoryTitle(
effects,
titleAfter,
nok,
'definitionTitle',
'definitionTitleMarker',
'definitionTitleString'
)(code)
}
/**
* After title.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function titleAfter(code) {
return markdownSpace(code)
? factorySpace(effects, titleAfterOptionalWhitespace, 'whitespace')(code)
: titleAfterOptionalWhitespace(code)
}
/**
* After title, after optional whitespace.
*
* ```markdown
* > | [a]: b "c"
* ^
* ```
*
* @type {State}
*/
function titleAfterOptionalWhitespace(code) {
return code === null || markdownLineEnding(code) ? ok(code) : nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/code-indented.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const codeIndented = {
name: 'codeIndented',
tokenize: tokenizeCodeIndented
}
/** @type {Construct} */
const furtherStart = {
tokenize: tokenizeFurtherStart,
partial: true
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCodeIndented(effects, ok, nok) {
const self = this
return start
/**
* Start of code (indented).
*
* > **Parsing note**: it is not needed to check if this first line is a
* > filled line (that it has a non-whitespace character), because blank lines
* > are parsed already, so we never run into that.
*
* ```markdown
* > | aaa
* ^
* ```
*
* @type {State}
*/
function start(code) {
// To do: manually check if interrupting like `markdown-rs`.
effects.enter('codeIndented')
// To do: use an improved `space_or_tab` function like `markdown-rs`,
// so that we can drop the next state.
return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)
}
/**
* At start, after 1 or 4 spaces.
*
* ```markdown
* > | aaa
* ^
* ```
*
* @type {State}
*/
function afterPrefix(code) {
const tail = self.events[self.events.length - 1]
return tail &&
tail[1].type === 'linePrefix' &&
tail[2].sliceSerialize(tail[1], true).length >= 4
? atBreak(code)
: nok(code)
}
/**
* At a break.
*
* ```markdown
* > | aaa
* ^ ^
* ```
*
* @type {State}
*/
function atBreak(code) {
if (code === null) {
return after(code)
}
if (markdownLineEnding(code)) {
return effects.attempt(furtherStart, atBreak, after)(code)
}
effects.enter('codeFlowValue')
return inside(code)
}
/**
* In code content.
*
* ```markdown
* > | aaa
* ^^^^
* ```
*
* @type {State}
*/
function inside(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('codeFlowValue')
return atBreak(code)
}
effects.consume(code)
return inside
}
/** @type {State} */
function after(code) {
effects.exit('codeIndented')
// To do: allow interrupting like `markdown-rs`.
// Feel free to interrupt.
// tokenizer.interrupt = false
return ok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeFurtherStart(effects, ok, nok) {
const self = this
return furtherStart
/**
* At eol, trying to parse another indent.
*
* ```markdown
* > | aaa
* ^
* | bbb
* ```
*
* @type {State}
*/
function furtherStart(code) {
// To do: improve `lazy` / `pierce` handling.
// If this is a lazy line, it can’t be code.
if (self.parser.lazy[self.now().line]) {
return nok(code)
}
if (markdownLineEnding(code)) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return furtherStart
}
// To do: the code here in `micromark-js` is a bit different from
// `markdown-rs` because there it can attempt spaces.
// We can’t yet.
//
// To do: use an improved `space_or_tab` function like `markdown-rs`,
// so that we can drop the next state.
return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)
}
/**
* At start, after 1 or 4 spaces.
*
* ```markdown
* > | aaa
* ^
* ```
*
* @type {State}
*/
function afterPrefix(code) {
const tail = self.events[self.events.length - 1]
return tail &&
tail[1].type === 'linePrefix' &&
tail[2].sliceSerialize(tail[1], true).length >= 4
? ok(code)
: markdownLineEnding(code)
? furtherStart(code)
: nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/heading-atx.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const headingAtx = {
name: 'headingAtx',
tokenize: tokenizeHeadingAtx,
resolve: resolveHeadingAtx
}
/** @type {Resolver} */
function resolveHeadingAtx(events, context) {
let contentEnd = events.length - 2
let contentStart = 3
/** @type {Token} */
let content
/** @type {Token} */
let text
// Prefix whitespace, part of the opening.
if (events[contentStart][1].type === 'whitespace') {
contentStart += 2
}
// Suffix whitespace, part of the closing.
if (
contentEnd - 2 > contentStart &&
events[contentEnd][1].type === 'whitespace'
) {
contentEnd -= 2
}
if (
events[contentEnd][1].type === 'atxHeadingSequence' &&
(contentStart === contentEnd - 1 ||
(contentEnd - 4 > contentStart &&
events[contentEnd - 2][1].type === 'whitespace'))
) {
contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
}
if (contentEnd > contentStart) {
content = {
type: 'atxHeadingText',
start: events[contentStart][1].start,
end: events[contentEnd][1].end
}
text = {
type: 'chunkText',
start: events[contentStart][1].start,
end: events[contentEnd][1].end,
contentType: 'text'
}
splice(events, contentStart, contentEnd - contentStart + 1, [
['enter', content, context],
['enter', text, context],
['exit', text, context],
['exit', content, context]
])
}
return events
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeHeadingAtx(effects, ok, nok) {
let size = 0
return start
/**
* Start of a heading (atx).
*
* ```markdown
* > | ## aa
* ^
* ```
*
* @type {State}
*/
function start(code) {
// To do: parse indent like `markdown-rs`.
effects.enter('atxHeading')
return before(code)
}
/**
* After optional whitespace, at `#`.
*
* ```markdown
* > | ## aa
* ^
* ```
*
* @type {State}
*/
function before(code) {
effects.enter('atxHeadingSequence')
return sequenceOpen(code)
}
/**
* In opening sequence.
*
* ```markdown
* > | ## aa
* ^
* ```
*
* @type {State}
*/
function sequenceOpen(code) {
if (code === 35 && size++ < 6) {
effects.consume(code)
return sequenceOpen
}
// Always at least one `#`.
if (code === null || markdownLineEndingOrSpace(code)) {
effects.exit('atxHeadingSequence')
return atBreak(code)
}
return nok(code)
}
/**
* After something, before something else.
*
* ```markdown
* > | ## aa
* ^
* ```
*
* @type {State}
*/
function atBreak(code) {
if (code === 35) {
effects.enter('atxHeadingSequence')
return sequenceFurther(code)
}
if (code === null || markdownLineEnding(code)) {
effects.exit('atxHeading')
// To do: interrupt like `markdown-rs`.
// // Feel free to interrupt.
// tokenizer.interrupt = false
return ok(code)
}
if (markdownSpace(code)) {
return factorySpace(effects, atBreak, 'whitespace')(code)
}
// To do: generate `data` tokens, add the `text` token later.
// Needs edit map, see: `markdown.rs`.
effects.enter('atxHeadingText')
return data(code)
}
/**
* In further sequence (after whitespace).
*
* Could be normal “visible” hashes in the heading or a final sequence.
*
* ```markdown
* > | ## aa ##
* ^
* ```
*
* @type {State}
*/
function sequenceFurther(code) {
if (code === 35) {
effects.consume(code)
return sequenceFurther
}
effects.exit('atxHeadingSequence')
return atBreak(code)
}
/**
* In text.
*
* ```markdown
* > | ## aa
* ^
* ```
*
* @type {State}
*/
function data(code) {
if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
effects.exit('atxHeadingText')
return atBreak(code)
}
effects.consume(code)
return data
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/setext-underline.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const setextUnderline = {
name: 'setextUnderline',
tokenize: tokenizeSetextUnderline,
resolveTo: resolveToSetextUnderline
}
/** @type {Resolver} */
function resolveToSetextUnderline(events, context) {
// To do: resolve like `markdown-rs`.
let index = events.length
/** @type {number | undefined} */
let content
/** @type {number | undefined} */
let text
/** @type {number | undefined} */
let definition
// Find the opening of the content.
// It’ll always exist: we don’t tokenize if it isn’t there.
while (index--) {
if (events[index][0] === 'enter') {
if (events[index][1].type === 'content') {
content = index
break
}
if (events[index][1].type === 'paragraph') {
text = index
}
}
// Exit
else {
if (events[index][1].type === 'content') {
// Remove the content end (if needed we’ll add it later)
events.splice(index, 1)
}
if (!definition && events[index][1].type === 'definition') {
definition = index
}
}
}
const heading = {
type: 'setextHeading',
start: Object.assign({}, events[text][1].start),
end: Object.assign({}, events[events.length - 1][1].end)
}
// Change the paragraph to setext heading text.
events[text][1].type = 'setextHeadingText'
// If we have definitions in the content, we’ll keep on having content,
// but we need to move it.
if (definition) {
events.splice(text, 0, ['enter', heading, context])
events.splice(definition + 1, 0, ['exit', events[content][1], context])
events[content][1].end = Object.assign({}, events[definition][1].end)
} else {
events[content][1] = heading
}
// Add the heading exit at the end.
events.push(['exit', heading, context])
return events
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeSetextUnderline(effects, ok, nok) {
const self = this
/** @type {NonNullable<Code>} */
let marker
return start
/**
* At start of heading (setext) underline.
*
* ```markdown
* | aa
* > | ==
* ^
* ```
*
* @type {State}
*/
function start(code) {
let index = self.events.length
/** @type {boolean | undefined} */
let paragraph
// Find an opening.
while (index--) {
// Skip enter/exit of line ending, line prefix, and content.
// We can now either have a definition or a paragraph.
if (
self.events[index][1].type !== 'lineEnding' &&
self.events[index][1].type !== 'linePrefix' &&
self.events[index][1].type !== 'content'
) {
paragraph = self.events[index][1].type === 'paragraph'
break
}
}
// To do: handle lazy/pierce like `markdown-rs`.
// To do: parse indent like `markdown-rs`.
if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph)) {
effects.enter('setextHeadingLine')
marker = code
return before(code)
}
return nok(code)
}
/**
* After optional whitespace, at `-` or `=`.
*
* ```markdown
* | aa
* > | ==
* ^
* ```
*
* @type {State}
*/
function before(code) {
effects.enter('setextHeadingLineSequence')
return inside(code)
}
/**
* In sequence.
*
* ```markdown
* | aa
* > | ==
* ^
* ```
*
* @type {State}
*/
function inside(code) {
if (code === marker) {
effects.consume(code)
return inside
}
effects.exit('setextHeadingLineSequence')
return markdownSpace(code)
? factorySpace(effects, after, 'lineSuffix')(code)
: after(code)
}
/**
* After sequence, after optional whitespace.
*
* ```markdown
* | aa
* > | ==
* ^
* ```
*
* @type {State}
*/
function after(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('setextHeadingLine')
return ok(code)
}
return nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-util-html-tag-name/index.js
/**
* List of lowercase HTML “block” tag names.
*
* The list, when parsing HTML (flow), results in more relaxed rules (condition
* 6).
* Because they are known blocks, the HTML-like syntax doesn’t have to be
* strictly parsed.
* For tag names not in this list, a more strict algorithm (condition 7) is used
* to detect whether the HTML-like syntax is seen as HTML (flow) or not.
*
* This is copied from the HTML blocks section of the CommonMark spec.
*
* > 👉 **Note**: `search` was added in `CommonMark@0.31`.
*/
const htmlBlockNames = [
'address',
'article',
'aside',
'base',
'basefont',
'blockquote',
'body',
'caption',
'center',
'col',
'colgroup',
'dd',
'details',
'dialog',
'dir',
'div',
'dl',
'dt',
'fieldset',
'figcaption',
'figure',
'footer',
'form',
'frame',
'frameset',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'head',
'header',
'hr',
'html',
'iframe',
'legend',
'li',
'link',
'main',
'menu',
'menuitem',
'nav',
'noframes',
'ol',
'optgroup',
'option',
'p',
'param',
'search',
'section',
'summary',
'table',
'tbody',
'td',
'tfoot',
'th',
'thead',
'title',
'tr',
'track',
'ul'
]
/**
* List of lowercase HTML “raw” tag names.
*
* The list, when parsing HTML (flow), results in HTML that can include lines
* without exiting, until a closing tag also in this list is found (condition
* 1).
*
* This module is likewise copied from the HTML blocks section of the CommonMark spec.
*
* > 👉 **Note**: `textarea` was added in `CommonMark@0.30`.
*/
const htmlRawNames = ['pre', 'script', 'style', 'textarea']
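// Illustrative only, not part of the original bundle: a hedged sketch of how
// these two lists are typically consulted when deciding between the raw
// (condition 1) and basic (condition 6) HTML (flow) kinds, as the `tagName`
// state in `html-flow.js` below does; `exampleClassifyTagName` is a
// hypothetical helper.
function exampleClassifyTagName(tagName) {
  const name = tagName.toLowerCase()
  if (htmlRawNames.includes(name)) return 'raw' // e.g. 'script'
  if (htmlBlockNames.includes(name)) return 'basic' // e.g. 'div'
  return 'complete' // anything else uses the stricter condition 7 rules
}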
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/html-flow.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const htmlFlow = {
name: 'htmlFlow',
tokenize: tokenizeHtmlFlow,
resolveTo: resolveToHtmlFlow,
concrete: true
}
/** @type {Construct} */
const blankLineBefore = {
tokenize: tokenizeBlankLineBefore,
partial: true
}
const nonLazyContinuationStart = {
tokenize: tokenizeNonLazyContinuationStart,
partial: true
}
/** @type {Resolver} */
function resolveToHtmlFlow(events) {
let index = events.length
while (index--) {
if (events[index][0] === 'enter' && events[index][1].type === 'htmlFlow') {
break
}
}
if (index > 1 && events[index - 2][1].type === 'linePrefix') {
// Add the prefix start to the HTML token.
events[index][1].start = events[index - 2][1].start
// Add the prefix start to the HTML line token.
events[index + 1][1].start = events[index - 2][1].start
// Remove the line prefix.
events.splice(index - 2, 2)
}
return events
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeHtmlFlow(effects, ok, nok) {
const self = this
/** @type {number} */
let marker
/** @type {boolean} */
let closingTag
/** @type {string} */
let buffer
/** @type {number} */
let index
/** @type {Code} */
let markerB
return start
/**
* Start of HTML (flow).
*
* ```markdown
* > | <x />
* ^
* ```
*
* @type {State}
*/
function start(code) {
// To do: parse indent like `markdown-rs`.
return before(code)
}
/**
* At `<`, after optional whitespace.
*
* ```markdown
* > | <x />
* ^
* ```
*
* @type {State}
*/
function before(code) {
effects.enter('htmlFlow')
effects.enter('htmlFlowData')
effects.consume(code)
return open
}
/**
* After `<`, at tag name or other stuff.
*
* ```markdown
* > | <x />
* ^
* > | <!doctype>
* ^
* > | <!--xxx-->
* ^
* ```
*
* @type {State}
*/
function open(code) {
if (code === 33) {
effects.consume(code)
return declarationOpen
}
if (code === 47) {
effects.consume(code)
closingTag = true
return tagCloseStart
}
if (code === 63) {
effects.consume(code)
marker = 3
// To do:
// tokenizer.concrete = true
// To do: use `markdown-rs` style interrupt.
// While we’re in an instruction instead of a declaration, we’re on a `?`
// right now, so we do need to search for `>`, similar to declarations.
return self.interrupt ? ok : continuationDeclarationInside
}
// ASCII alphabetical.
if (asciiAlpha(code)) {
effects.consume(code)
// @ts-expect-error: not null.
buffer = String.fromCharCode(code)
return tagName
}
return nok(code)
}
/**
* After `<!`, at declaration, comment, or CDATA.
*
* ```markdown
* > | <!doctype>
* ^
* > | <!--xxx-->
* ^
* > | <![CDATA[>&<]]>
* ^
* ```
*
* @type {State}
*/
function declarationOpen(code) {
if (code === 45) {
effects.consume(code)
marker = 2
return commentOpenInside
}
if (code === 91) {
effects.consume(code)
marker = 5
index = 0
return cdataOpenInside
}
// ASCII alphabetical.
if (asciiAlpha(code)) {
effects.consume(code)
marker = 4
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok : continuationDeclarationInside
}
return nok(code)
}
/**
* After `<!-`, inside a comment, at another `-`.
*
* ```markdown
* > | <!--xxx-->
* ^
* ```
*
* @type {State}
*/
function commentOpenInside(code) {
if (code === 45) {
effects.consume(code)
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok : continuationDeclarationInside
}
return nok(code)
}
/**
* After `<![`, inside CDATA, expecting `CDATA[`.
*
* ```markdown
* > | <![CDATA[>&<]]>
* ^^^^^^
* ```
*
* @type {State}
*/
function cdataOpenInside(code) {
const value = 'CDATA['
if (code === value.charCodeAt(index++)) {
effects.consume(code)
if (index === value.length) {
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok : continuation
}
return cdataOpenInside
}
return nok(code)
}
/**
* After `</`, in closing tag, at tag name.
*
* ```markdown
* > | </x>
* ^
* ```
*
* @type {State}
*/
function tagCloseStart(code) {
if (asciiAlpha(code)) {
effects.consume(code)
// @ts-expect-error: not null.
buffer = String.fromCharCode(code)
return tagName
}
return nok(code)
}
/**
* In tag name.
*
* ```markdown
* > | <ab>
* ^^
* > | </ab>
* ^^
* ```
*
* @type {State}
*/
function tagName(code) {
if (
code === null ||
code === 47 ||
code === 62 ||
markdownLineEndingOrSpace(code)
) {
const slash = code === 47
const name = buffer.toLowerCase()
if (!slash && !closingTag && htmlRawNames.includes(name)) {
marker = 1
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok(code) : continuation(code)
}
if (htmlBlockNames.includes(buffer.toLowerCase())) {
marker = 6
if (slash) {
effects.consume(code)
return basicSelfClosing
}
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok(code) : continuation(code)
}
marker = 7
// Do not support complete HTML when interrupting.
return self.interrupt && !self.parser.lazy[self.now().line]
? nok(code)
: closingTag
? completeClosingTagAfter(code)
: completeAttributeNameBefore(code)
}
// ASCII alphanumerical and `-`.
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code)
buffer += String.fromCharCode(code)
return tagName
}
return nok(code)
}
/**
* After closing slash of a basic tag name.
*
* ```markdown
* > | <div/>
* ^
* ```
*
* @type {State}
*/
function basicSelfClosing(code) {
if (code === 62) {
effects.consume(code)
// // Do not form containers.
// tokenizer.concrete = true
return self.interrupt ? ok : continuation
}
return nok(code)
}
/**
* After closing slash of a complete tag name.
*
* ```markdown
* > | <x/>
* ^
* ```
*
* @type {State}
*/
function completeClosingTagAfter(code) {
if (markdownSpace(code)) {
effects.consume(code)
return completeClosingTagAfter
}
return completeEnd(code)
}
/**
* At an attribute name.
*
* At first, this state is used after a complete tag name, after whitespace,
* where it expects optional attributes or the end of the tag.
* It is also reused after attributes, when expecting more optional
* attributes.
*
* ```markdown
* > | <a />
* ^
* > | <a :b>
* ^
* > | <a _b>
* ^
* > | <a b>
* ^
* > | <a >
* ^
* ```
*
* @type {State}
*/
function completeAttributeNameBefore(code) {
if (code === 47) {
effects.consume(code)
return completeEnd
}
// ASCII alphanumerical and `:` and `_`.
if (code === 58 || code === 95 || asciiAlpha(code)) {
effects.consume(code)
return completeAttributeName
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeNameBefore
}
return completeEnd(code)
}
/**
* In attribute name.
*
* ```markdown
* > | <a :b>
* ^
* > | <a _b>
* ^
* > | <a b>
* ^
* ```
*
* @type {State}
*/
function completeAttributeName(code) {
// ASCII alphanumerical and `-`, `.`, `:`, and `_`.
if (
code === 45 ||
code === 46 ||
code === 58 ||
code === 95 ||
asciiAlphanumeric(code)
) {
effects.consume(code)
return completeAttributeName
}
return completeAttributeNameAfter(code)
}
/**
* After attribute name, at an optional initializer, the end of the tag, or
* whitespace.
*
* ```markdown
* > | <a b>
* ^
* > | <a b=c>
* ^
* ```
*
* @type {State}
*/
function completeAttributeNameAfter(code) {
if (code === 61) {
effects.consume(code)
return completeAttributeValueBefore
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeNameAfter
}
return completeAttributeNameBefore(code)
}
/**
* Before unquoted, double quoted, or single quoted attribute value, allowing
* whitespace.
*
* ```markdown
* > | <a b=c>
* ^
* > | <a b="c">
* ^
* ```
*
* @type {State}
*/
function completeAttributeValueBefore(code) {
if (
code === null ||
code === 60 ||
code === 61 ||
code === 62 ||
code === 96
) {
return nok(code)
}
if (code === 34 || code === 39) {
effects.consume(code)
markerB = code
return completeAttributeValueQuoted
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeValueBefore
}
return completeAttributeValueUnquoted(code)
}
/**
* In double or single quoted attribute value.
*
* ```markdown
* > | <a b="c">
* ^
* > | <a b='c'>
* ^
* ```
*
* @type {State}
*/
function completeAttributeValueQuoted(code) {
if (code === markerB) {
effects.consume(code)
markerB = null
return completeAttributeValueQuotedAfter
}
if (code === null || markdownLineEnding(code)) {
return nok(code)
}
effects.consume(code)
return completeAttributeValueQuoted
}
/**
* In unquoted attribute value.
*
* ```markdown
* > | <a b=c>
* ^
* ```
*
* @type {State}
*/
function completeAttributeValueUnquoted(code) {
if (
code === null ||
code === 34 ||
code === 39 ||
code === 47 ||
code === 60 ||
code === 61 ||
code === 62 ||
code === 96 ||
markdownLineEndingOrSpace(code)
) {
return completeAttributeNameAfter(code)
}
effects.consume(code)
return completeAttributeValueUnquoted
}
/**
* After double or single quoted attribute value, before whitespace or the
* end of the tag.
*
* ```markdown
* > | <a b="c">
* ^
* ```
*
* @type {State}
*/
function completeAttributeValueQuotedAfter(code) {
if (code === 47 || code === 62 || markdownSpace(code)) {
return completeAttributeNameBefore(code)
}
return nok(code)
}
/**
* In certain circumstances of a complete tag where only an `>` is allowed.
*
* ```markdown
* > | <a b="c">
* ^
* ```
*
* @type {State}
*/
function completeEnd(code) {
if (code === 62) {
effects.consume(code)
return completeAfter
}
return nok(code)
}
/**
* After `>` in a complete tag.
*
* ```markdown
* > | <x/>
* ^
* ```
*
* @type {State}
*/
function completeAfter(code) {
if (code === null || markdownLineEnding(code)) {
// // Do not form containers.
// tokenizer.concrete = true
return continuation(code)
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAfter
}
return nok(code)
}
/**
* In continuation of any HTML kind.
*
* ```markdown
* > | <!--xxx-->
* ^
* ```
*
* @type {State}
*/
function continuation(code) {
if (code === 45 && marker === 2) {
effects.consume(code)
return continuationCommentInside
}
if (code === 60 && marker === 1) {
effects.consume(code)
return continuationRawTagOpen
}
if (code === 62 && marker === 4) {
effects.consume(code)
return continuationClose
}
if (code === 63 && marker === 3) {
effects.consume(code)
return continuationDeclarationInside
}
if (code === 93 && marker === 5) {
effects.consume(code)
return continuationCdataInside
}
if (markdownLineEnding(code) && (marker === 6 || marker === 7)) {
effects.exit('htmlFlowData')
return effects.check(
blankLineBefore,
continuationAfter,
continuationStart
)(code)
}
if (code === null || markdownLineEnding(code)) {
effects.exit('htmlFlowData')
return continuationStart(code)
}
effects.consume(code)
return continuation
}
/**
* In continuation, at eol.
*
* ```markdown
* > | <!--xxx-->
* ^
* | asd
* ```
*
* @type {State}
*/
function continuationStart(code) {
return effects.check(
nonLazyContinuationStart,
continuationStartNonLazy,
continuationAfter
)(code)
}
/**
* In continuation, at eol, before non-lazy content.
*
* ```markdown
* > | <!--xxx-->
* ^
* | asd
* ```
*
* @type {State}
*/
function continuationStartNonLazy(code) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return continuationBefore
}
/**
* In continuation, before non-lazy content.
*
* ```markdown
* | <!--xxx-->
* > | asd
* ^
* ```
*
* @type {State}
*/
function continuationBefore(code) {
if (code === null || markdownLineEnding(code)) {
return continuationStart(code)
}
effects.enter('htmlFlowData')
return continuation(code)
}
/**
* In comment continuation, after one `-`, expecting another.
*
* ```markdown
* > | <!--xxx-->
* ^
* ```
*
* @type {State}
*/
function continuationCommentInside(code) {
if (code === 45) {
effects.consume(code)
return continuationDeclarationInside
}
return continuation(code)
}
/**
* In raw continuation, after `<`, at `/`.
*
* ```markdown
* > | <script>console.log(1)</script>
* ^
* ```
*
* @type {State}
*/
function continuationRawTagOpen(code) {
if (code === 47) {
effects.consume(code)
buffer = ''
return continuationRawEndTag
}
return continuation(code)
}
/**
* In raw continuation, after `</`, in a raw tag name.
*
* ```markdown
* > | <script>console.log(1)</script>
* ^^^^^^
* ```
*
* @type {State}
*/
function continuationRawEndTag(code) {
if (code === 62) {
const name = buffer.toLowerCase()
if (htmlRawNames.includes(name)) {
effects.consume(code)
return continuationClose
}
return continuation(code)
}
if (asciiAlpha(code) && buffer.length < 8) {
effects.consume(code)
// @ts-expect-error: not null.
buffer += String.fromCharCode(code)
return continuationRawEndTag
}
return continuation(code)
}
/**
* In cdata continuation, after `]`, expecting `]>`.
*
* ```markdown
* > | <![CDATA[>&<]]>
* ^
* ```
*
* @type {State}
*/
function continuationCdataInside(code) {
if (code === 93) {
effects.consume(code)
return continuationDeclarationInside
}
return continuation(code)
}
/**
* In declaration or instruction continuation, at `>`.
*
* ```markdown
* > | <!-->
* ^
* > | <?>
* ^
* > | <!q>
* ^
* > | <!--ab-->
* ^
* > | <![CDATA[>&<]]>
* ^
* ```
*
* @type {State}
*/
function continuationDeclarationInside(code) {
if (code === 62) {
effects.consume(code)
return continuationClose
}
// More dashes.
if (code === 45 && marker === 2) {
effects.consume(code)
return continuationDeclarationInside
}
return continuation(code)
}
/**
* In closed continuation: everything we get until the eol/eof is part of it.
*
* ```markdown
* > | <!doctype>
* ^
* ```
*
* @type {State}
*/
function continuationClose(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('htmlFlowData')
return continuationAfter(code)
}
effects.consume(code)
return continuationClose
}
/**
* Done.
*
* ```markdown
* > | <!doctype>
* ^
* ```
*
* @type {State}
*/
function continuationAfter(code) {
effects.exit('htmlFlow')
// // Feel free to interrupt.
// tokenizer.interrupt = false
// // No longer concrete.
// tokenizer.concrete = false
return ok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeNonLazyContinuationStart(effects, ok, nok) {
const self = this
return start
/**
* At eol, before continuation.
*
* ```markdown
* > | * ```js
* ^
* | b
* ```
*
* @type {State}
*/
function start(code) {
if (markdownLineEnding(code)) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return after
}
return nok(code)
}
/**
* A continuation.
*
* ```markdown
* | * ```js
* > | b
* ^
* ```
*
* @type {State}
*/
function after(code) {
return self.parser.lazy[self.now().line] ? nok(code) : ok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeBlankLineBefore(effects, ok, nok) {
return start
/**
* Before eol, expecting blank line.
*
* ```markdown
* > | <div>
* ^
* |
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return effects.attempt(blankLine, ok, nok)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/code-fenced.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const nonLazyContinuation = {
tokenize: tokenizeNonLazyContinuation,
partial: true
}
/** @type {Construct} */
const codeFenced = {
name: 'codeFenced',
tokenize: tokenizeCodeFenced,
concrete: true
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCodeFenced(effects, ok, nok) {
const self = this
/** @type {Construct} */
const closeStart = {
tokenize: tokenizeCloseStart,
partial: true
}
let initialPrefix = 0
let sizeOpen = 0
/** @type {NonNullable<Code>} */
let marker
return start
/**
* Start of code.
*
* ```markdown
* > | ~~~js
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function start(code) {
// To do: parse whitespace like `markdown-rs`.
return beforeSequenceOpen(code)
}
/**
* In opening fence, after prefix, at sequence.
*
* ```markdown
* > | ~~~js
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function beforeSequenceOpen(code) {
const tail = self.events[self.events.length - 1]
initialPrefix =
tail && tail[1].type === 'linePrefix'
? tail[2].sliceSerialize(tail[1], true).length
: 0
marker = code
effects.enter('codeFenced')
effects.enter('codeFencedFence')
effects.enter('codeFencedFenceSequence')
return sequenceOpen(code)
}
/**
* In opening fence sequence.
*
* ```markdown
* > | ~~~js
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function sequenceOpen(code) {
if (code === marker) {
sizeOpen++
effects.consume(code)
return sequenceOpen
}
if (sizeOpen < 3) {
return nok(code)
}
effects.exit('codeFencedFenceSequence')
return markdownSpace(code)
? factorySpace(effects, infoBefore, 'whitespace')(code)
: infoBefore(code)
}
/**
* In opening fence, after the sequence (and optional whitespace), before info.
*
* ```markdown
* > | ~~~js
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function infoBefore(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('codeFencedFence')
return self.interrupt
? ok(code)
: effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
}
effects.enter('codeFencedFenceInfo')
effects.enter('chunkString', {
contentType: 'string'
})
return info(code)
}
/**
* In info.
*
* ```markdown
* > | ~~~js
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function info(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('chunkString')
effects.exit('codeFencedFenceInfo')
return infoBefore(code)
}
if (markdownSpace(code)) {
effects.exit('chunkString')
effects.exit('codeFencedFenceInfo')
return factorySpace(effects, metaBefore, 'whitespace')(code)
}
if (code === 96 && code === marker) {
return nok(code)
}
effects.consume(code)
return info
}
/**
* In opening fence, after info and whitespace, before meta.
*
* ```markdown
* > | ~~~js eval
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function metaBefore(code) {
if (code === null || markdownLineEnding(code)) {
return infoBefore(code)
}
effects.enter('codeFencedFenceMeta')
effects.enter('chunkString', {
contentType: 'string'
})
return meta(code)
}
/**
* In meta.
*
* ```markdown
* > | ~~~js eval
* ^
* | alert(1)
* | ~~~
* ```
*
* @type {State}
*/
function meta(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('chunkString')
effects.exit('codeFencedFenceMeta')
return infoBefore(code)
}
if (code === 96 && code === marker) {
return nok(code)
}
effects.consume(code)
return meta
}
/**
* At eol/eof in code, before a non-lazy closing fence or content.
*
* ```markdown
* > | ~~~js
* ^
* > | alert(1)
* ^
* | ~~~
* ```
*
* @type {State}
*/
function atNonLazyBreak(code) {
return effects.attempt(closeStart, after, contentBefore)(code)
}
/**
* Before code content, not a closing fence, at eol.
*
* ```markdown
* | ~~~js
* > | alert(1)
* ^
* | ~~~
* ```
*
* @type {State}
*/
function contentBefore(code) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return contentStart
}
/**
* Before code content, not a closing fence.
*
* ```markdown
* | ~~~js
* > | alert(1)
* ^
* | ~~~
* ```
*
* @type {State}
*/
function contentStart(code) {
return initialPrefix > 0 && markdownSpace(code)
? factorySpace(
effects,
beforeContentChunk,
'linePrefix',
initialPrefix + 1
)(code)
: beforeContentChunk(code)
}
/**
* Before code content, after optional prefix.
*
* ```markdown
* | ~~~js
* > | alert(1)
* ^
* | ~~~
* ```
*
* @type {State}
*/
function beforeContentChunk(code) {
if (code === null || markdownLineEnding(code)) {
return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
}
effects.enter('codeFlowValue')
return contentChunk(code)
}
/**
* In code content.
*
* ```markdown
* | ~~~js
* > | alert(1)
* ^^^^^^^^
* | ~~~
* ```
*
* @type {State}
*/
function contentChunk(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('codeFlowValue')
return beforeContentChunk(code)
}
effects.consume(code)
return contentChunk
}
/**
* After code.
*
* ```markdown
* | ~~~js
* | alert(1)
* > | ~~~
* ^
* ```
*
* @type {State}
*/
function after(code) {
effects.exit('codeFenced')
return ok(code)
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCloseStart(effects, ok, nok) {
let size = 0
return startBefore
/**
* At eol before a potential closing fence.
*
* @type {State}
*/
function startBefore(code) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return start
}
/**
* Before closing fence, at optional whitespace.
*
* ```markdown
* | ~~~js
* | alert(1)
* > | ~~~
* ^
* ```
*
* @type {State}
*/
function start(code) {
// Always populated by defaults.
// To do: `enter` here or in next state?
effects.enter('codeFencedFence')
return markdownSpace(code)
? factorySpace(
effects,
beforeSequenceClose,
'linePrefix',
self.parser.constructs.disable.null.includes('codeIndented')
? undefined
: 4
)(code)
: beforeSequenceClose(code)
}
/**
* In closing fence, after optional whitespace, at sequence.
*
* ```markdown
* | ~~~js
* | alert(1)
* > | ~~~
* ^
* ```
*
* @type {State}
*/
function beforeSequenceClose(code) {
if (code === marker) {
effects.enter('codeFencedFenceSequence')
return sequenceClose(code)
}
return nok(code)
}
/**
* In closing fence sequence.
*
* ```markdown
* | ~~~js
* | alert(1)
* > | ~~~
* ^
* ```
*
* @type {State}
*/
function sequenceClose(code) {
if (code === marker) {
size++
effects.consume(code)
return sequenceClose
}
if (size >= sizeOpen) {
effects.exit('codeFencedFenceSequence')
return markdownSpace(code)
? factorySpace(effects, sequenceCloseAfter, 'whitespace')(code)
: sequenceCloseAfter(code)
}
return nok(code)
}
/**
* After closing fence sequence, after optional whitespace.
*
* ```markdown
* | ~~~js
* | alert(1)
* > | ~~~
* ^
* ```
*
* @type {State}
*/
function sequenceCloseAfter(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit('codeFencedFence')
return ok(code)
}
return nok(code)
}
}
}
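// Illustration of the fence rules encoded above (a sketch, not exhaustive):
// the opening sequence needs at least three identical markers (backtick or
// tilde), and a closing fence only matches when it uses the same marker and
// is at least as long as the opening sequence. A block opened with `~~~~` is
// therefore not closed by `~~~`, but is closed by `~~~~` or `~~~~~`.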
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeNonLazyContinuation(effects, ok, nok) {
const self = this
return start
/**
* At eol, before the next line (eof is not a continuation).
*
* @type {State}
*/
function start(code) {
if (code === null) {
return nok(code)
}
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return lineStart
}
/**
* At the start of the next line: ok if it is not lazy.
*
* @type {State}
*/
function lineStart(code) {
return self.parser.lazy[self.now().line] ? nok(code) : ok(code)
}
}
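// Illustration (assuming a block quote container): a line that omits the `>`
// marker while a fenced code block is open, such as the third line of
//
//   > ```js
//   > alert(1)
//   alert(2)
//
// is flagged as lazy by the parser, so `lineStart` rejects it and the fenced
// code block above is closed instead of swallowing that line.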
;// CONCATENATED MODULE: ./node_modules/character-entities/index.js
/**
* Map of named character references.
*
* @type {Record<string, string>}
*/
const characterEntities = {
AElig: 'Æ',
AMP: '&',
Aacute: 'Á',
Abreve: 'Ă',
Acirc: 'Â',
Acy: 'А',
Afr: '𝔄',
Agrave: 'À',
Alpha: 'Α',
Amacr: 'Ā',
And: '⩓',
Aogon: 'Ą',
Aopf: '𝔸',
ApplyFunction: '',
Aring: 'Å',
Ascr: '𝒜',
Assign: '≔',
Atilde: 'Ã',
Auml: 'Ä',
Backslash: '∖',
Barv: '⫧',
Barwed: '⌆',
Bcy: 'Б',
Because: '∵',
Bernoullis: 'ℬ',
Beta: 'Β',
Bfr: '𝔅',
Bopf: '𝔹',
Breve: '˘',
Bscr: 'ℬ',
Bumpeq: '≎',
CHcy: 'Ч',
COPY: '©',
Cacute: 'Ć',
Cap: '⋒',
CapitalDifferentialD: 'ⅅ',
Cayleys: 'ℭ',
Ccaron: 'Č',
Ccedil: 'Ç',
Ccirc: 'Ĉ',
Cconint: '∰',
Cdot: 'Ċ',
Cedilla: '¸',
CenterDot: '·',
Cfr: 'ℭ',
Chi: 'Χ',
CircleDot: '⊙',
CircleMinus: '⊖',
CirclePlus: '⊕',
CircleTimes: '⊗',
ClockwiseContourIntegral: '∲',
CloseCurlyDoubleQuote: '”',
CloseCurlyQuote: '’',
Colon: '∷',
Colone: '⩴',
Congruent: '≡',
Conint: '∯',
ContourIntegral: '∮',
Copf: 'ℂ',
Coproduct: '∐',
CounterClockwiseContourIntegral: '∳',
Cross: '⨯',
Cscr: '𝒞',
Cup: '⋓',
CupCap: '≍',
DD: 'ⅅ',
DDotrahd: '⤑',
DJcy: 'Ђ',
DScy: 'Ѕ',
DZcy: 'Џ',
Dagger: '‡',
Darr: '↡',
Dashv: '⫤',
Dcaron: 'Ď',
Dcy: 'Д',
Del: '∇',
Delta: 'Δ',
Dfr: '𝔇',
DiacriticalAcute: '´',
DiacriticalDot: '˙',
DiacriticalDoubleAcute: '˝',
DiacriticalGrave: '`',
DiacriticalTilde: '˜',
Diamond: '⋄',
DifferentialD: 'ⅆ',
Dopf: '𝔻',
Dot: '¨',
DotDot: '⃜',
DotEqual: '≐',
DoubleContourIntegral: '∯',
DoubleDot: '¨',
DoubleDownArrow: '⇓',
DoubleLeftArrow: '⇐',
DoubleLeftRightArrow: '⇔',
DoubleLeftTee: '⫤',
DoubleLongLeftArrow: '⟸',
DoubleLongLeftRightArrow: '⟺',
DoubleLongRightArrow: '⟹',
DoubleRightArrow: '⇒',
DoubleRightTee: '⊨',
DoubleUpArrow: '⇑',
DoubleUpDownArrow: '⇕',
DoubleVerticalBar: '∥',
DownArrow: '↓',
DownArrowBar: '⤓',
DownArrowUpArrow: '⇵',
DownBreve: '̑',
DownLeftRightVector: '⥐',
DownLeftTeeVector: '⥞',
DownLeftVector: '↽',
DownLeftVectorBar: '⥖',
DownRightTeeVector: '⥟',
DownRightVector: '⇁',
DownRightVectorBar: '⥗',
DownTee: '⊤',
DownTeeArrow: '↧',
Downarrow: '⇓',
Dscr: '𝒟',
Dstrok: 'Đ',
ENG: 'Ŋ',
ETH: 'Ð',
Eacute: 'É',
Ecaron: 'Ě',
Ecirc: 'Ê',
Ecy: 'Э',
Edot: 'Ė',
Efr: '𝔈',
Egrave: 'È',
Element: '∈',
Emacr: 'Ē',
EmptySmallSquare: '◻',
EmptyVerySmallSquare: '▫',
Eogon: 'Ę',
Eopf: '𝔼',
Epsilon: 'Ε',
Equal: '⩵',
EqualTilde: '≂',
Equilibrium: '⇌',
Escr: 'ℰ',
Esim: '⩳',
Eta: 'Η',
Euml: 'Ë',
Exists: '∃',
ExponentialE: 'ⅇ',
Fcy: 'Ф',
Ffr: '𝔉',
FilledSmallSquare: '◼',
FilledVerySmallSquare: '▪',
Fopf: '𝔽',
ForAll: '∀',
Fouriertrf: 'ℱ',
Fscr: 'ℱ',
GJcy: 'Ѓ',
GT: '>',
Gamma: 'Γ',
Gammad: 'Ϝ',
Gbreve: 'Ğ',
Gcedil: 'Ģ',
Gcirc: 'Ĝ',
Gcy: 'Г',
Gdot: 'Ġ',
Gfr: '𝔊',
Gg: '⋙',
Gopf: '𝔾',
GreaterEqual: '≥',
GreaterEqualLess: '⋛',
GreaterFullEqual: '≧',
GreaterGreater: '⪢',
GreaterLess: '≷',
GreaterSlantEqual: '⩾',
GreaterTilde: '≳',
Gscr: '𝒢',
Gt: '≫',
HARDcy: 'Ъ',
Hacek: 'ˇ',
Hat: '^',
Hcirc: 'Ĥ',
Hfr: 'ℌ',
HilbertSpace: 'ℋ',
Hopf: 'ℍ',
HorizontalLine: '─',
Hscr: 'ℋ',
Hstrok: 'Ħ',
HumpDownHump: '≎',
HumpEqual: '≏',
IEcy: 'Е',
IJlig: 'IJ',
IOcy: 'Ё',
Iacute: 'Í',
Icirc: 'Î',
Icy: 'И',
Idot: 'İ',
Ifr: 'ℑ',
Igrave: 'Ì',
Im: 'ℑ',
Imacr: 'Ī',
ImaginaryI: 'ⅈ',
Implies: '⇒',
Int: '∬',
Integral: '∫',
Intersection: '⋂',
InvisibleComma: '',
InvisibleTimes: '',
Iogon: 'Į',
Iopf: '𝕀',
Iota: 'Ι',
Iscr: 'ℐ',
Itilde: 'Ĩ',
Iukcy: 'І',
Iuml: 'Ï',
Jcirc: 'Ĵ',
Jcy: 'Й',
Jfr: '𝔍',
Jopf: '𝕁',
Jscr: '𝒥',
Jsercy: 'Ј',
Jukcy: 'Є',
KHcy: 'Х',
KJcy: 'Ќ',
Kappa: 'Κ',
Kcedil: 'Ķ',
Kcy: 'К',
Kfr: '𝔎',
Kopf: '𝕂',
Kscr: '𝒦',
LJcy: 'Љ',
LT: '<',
Lacute: 'Ĺ',
Lambda: 'Λ',
Lang: '⟪',
Laplacetrf: 'ℒ',
Larr: '↞',
Lcaron: 'Ľ',
Lcedil: 'Ļ',
Lcy: 'Л',
LeftAngleBracket: '⟨',
LeftArrow: '←',
LeftArrowBar: '⇤',
LeftArrowRightArrow: '⇆',
LeftCeiling: '⌈',
LeftDoubleBracket: '⟦',
LeftDownTeeVector: '⥡',
LeftDownVector: '⇃',
LeftDownVectorBar: '⥙',
LeftFloor: '⌊',
LeftRightArrow: '↔',
LeftRightVector: '⥎',
LeftTee: '⊣',
LeftTeeArrow: '↤',
LeftTeeVector: '⥚',
LeftTriangle: '⊲',
LeftTriangleBar: '⧏',
LeftTriangleEqual: '⊴',
LeftUpDownVector: '⥑',
LeftUpTeeVector: '⥠',
LeftUpVector: '↿',
LeftUpVectorBar: '⥘',
LeftVector: '↼',
LeftVectorBar: '⥒',
Leftarrow: '⇐',
Leftrightarrow: '⇔',
LessEqualGreater: '⋚',
LessFullEqual: '≦',
LessGreater: '≶',
LessLess: '⪡',
LessSlantEqual: '⩽',
LessTilde: '≲',
Lfr: '𝔏',
Ll: '⋘',
Lleftarrow: '⇚',
Lmidot: 'Ŀ',
LongLeftArrow: '⟵',
LongLeftRightArrow: '⟷',
LongRightArrow: '⟶',
Longleftarrow: '⟸',
Longleftrightarrow: '⟺',
Longrightarrow: '⟹',
Lopf: '𝕃',
LowerLeftArrow: '↙',
LowerRightArrow: '↘',
Lscr: 'ℒ',
Lsh: '↰',
Lstrok: 'Ł',
Lt: '≪',
Map: '⤅',
Mcy: 'М',
MediumSpace: ' ',
Mellintrf: 'ℳ',
Mfr: '𝔐',
MinusPlus: '∓',
Mopf: '𝕄',
Mscr: 'ℳ',
Mu: 'Μ',
NJcy: 'Њ',
Nacute: 'Ń',
Ncaron: 'Ň',
Ncedil: 'Ņ',
Ncy: 'Н',
NegativeMediumSpace: '',
NegativeThickSpace: '',
NegativeThinSpace: '',
NegativeVeryThinSpace: '',
NestedGreaterGreater: '≫',
NestedLessLess: '≪',
NewLine: '\n',
Nfr: '𝔑',
NoBreak: '',
NonBreakingSpace: ' ',
Nopf: 'ℕ',
Not: '⫬',
NotCongruent: '≢',
NotCupCap: '≭',
NotDoubleVerticalBar: '∦',
NotElement: '∉',
NotEqual: '≠',
NotEqualTilde: '≂̸',
NotExists: '∄',
NotGreater: '≯',
NotGreaterEqual: '≱',
NotGreaterFullEqual: '≧̸',
NotGreaterGreater: '≫̸',
NotGreaterLess: '≹',
NotGreaterSlantEqual: '⩾̸',
NotGreaterTilde: '≵',
NotHumpDownHump: '≎̸',
NotHumpEqual: '≏̸',
NotLeftTriangle: '⋪',
NotLeftTriangleBar: '⧏̸',
NotLeftTriangleEqual: '⋬',
NotLess: '≮',
NotLessEqual: '≰',
NotLessGreater: '≸',
NotLessLess: '≪̸',
NotLessSlantEqual: '⩽̸',
NotLessTilde: '≴',
NotNestedGreaterGreater: '⪢̸',
NotNestedLessLess: '⪡̸',
NotPrecedes: '⊀',
NotPrecedesEqual: '⪯̸',
NotPrecedesSlantEqual: '⋠',
NotReverseElement: '∌',
NotRightTriangle: '⋫',
NotRightTriangleBar: '⧐̸',
NotRightTriangleEqual: '⋭',
NotSquareSubset: '⊏̸',
NotSquareSubsetEqual: '⋢',
NotSquareSuperset: '⊐̸',
NotSquareSupersetEqual: '⋣',
NotSubset: '⊂⃒',
NotSubsetEqual: '⊈',
NotSucceeds: '⊁',
NotSucceedsEqual: '⪰̸',
NotSucceedsSlantEqual: '⋡',
NotSucceedsTilde: '≿̸',
NotSuperset: '⊃⃒',
NotSupersetEqual: '⊉',
NotTilde: '≁',
NotTildeEqual: '≄',
NotTildeFullEqual: '≇',
NotTildeTilde: '≉',
NotVerticalBar: '∤',
Nscr: '𝒩',
Ntilde: 'Ñ',
Nu: 'Ν',
OElig: 'Œ',
Oacute: 'Ó',
Ocirc: 'Ô',
Ocy: 'О',
Odblac: 'Ő',
Ofr: '𝔒',
Ograve: 'Ò',
Omacr: 'Ō',
Omega: 'Ω',
Omicron: 'Ο',
Oopf: '𝕆',
OpenCurlyDoubleQuote: '“',
OpenCurlyQuote: '‘',
Or: '⩔',
Oscr: '𝒪',
Oslash: 'Ø',
Otilde: 'Õ',
Otimes: '⨷',
Ouml: 'Ö',
OverBar: '‾',
OverBrace: '⏞',
OverBracket: '⎴',
OverParenthesis: '⏜',
PartialD: '∂',
Pcy: 'П',
Pfr: '𝔓',
Phi: 'Φ',
Pi: 'Π',
PlusMinus: '±',
Poincareplane: 'ℌ',
Popf: 'ℙ',
Pr: '⪻',
Precedes: '≺',
PrecedesEqual: '⪯',
PrecedesSlantEqual: '≼',
PrecedesTilde: '≾',
Prime: '″',
Product: '∏',
Proportion: '∷',
Proportional: '∝',
Pscr: '𝒫',
Psi: 'Ψ',
QUOT: '"',
Qfr: '𝔔',
Qopf: 'ℚ',
Qscr: '𝒬',
RBarr: '⤐',
REG: '®',
Racute: 'Ŕ',
Rang: '⟫',
Rarr: '↠',
Rarrtl: '⤖',
Rcaron: 'Ř',
Rcedil: 'Ŗ',
Rcy: 'Р',
Re: 'ℜ',
ReverseElement: '∋',
ReverseEquilibrium: '⇋',
ReverseUpEquilibrium: '⥯',
Rfr: 'ℜ',
Rho: 'Ρ',
RightAngleBracket: '⟩',
RightArrow: '→',
RightArrowBar: '⇥',
RightArrowLeftArrow: '⇄',
RightCeiling: '⌉',
RightDoubleBracket: '⟧',
RightDownTeeVector: '⥝',
RightDownVector: '⇂',
RightDownVectorBar: '⥕',
RightFloor: '⌋',
RightTee: '⊢',
RightTeeArrow: '↦',
RightTeeVector: '⥛',
RightTriangle: '⊳',
RightTriangleBar: '⧐',
RightTriangleEqual: '⊵',
RightUpDownVector: '⥏',
RightUpTeeVector: '⥜',
RightUpVector: '↾',
RightUpVectorBar: '⥔',
RightVector: '⇀',
RightVectorBar: '⥓',
Rightarrow: '⇒',
Ropf: 'ℝ',
RoundImplies: '⥰',
Rrightarrow: '⇛',
Rscr: 'ℛ',
Rsh: '↱',
RuleDelayed: '⧴',
SHCHcy: 'Щ',
SHcy: 'Ш',
SOFTcy: 'Ь',
Sacute: 'Ś',
Sc: '⪼',
Scaron: 'Š',
Scedil: 'Ş',
Scirc: 'Ŝ',
Scy: 'С',
Sfr: '𝔖',
ShortDownArrow: '↓',
ShortLeftArrow: '←',
ShortRightArrow: '→',
ShortUpArrow: '↑',
Sigma: 'Σ',
SmallCircle: '∘',
Sopf: '𝕊',
Sqrt: '√',
Square: '□',
SquareIntersection: '⊓',
SquareSubset: '⊏',
SquareSubsetEqual: '⊑',
SquareSuperset: '⊐',
SquareSupersetEqual: '⊒',
SquareUnion: '⊔',
Sscr: '𝒮',
Star: '⋆',
Sub: '⋐',
Subset: '⋐',
SubsetEqual: '⊆',
Succeeds: '≻',
SucceedsEqual: '⪰',
SucceedsSlantEqual: '≽',
SucceedsTilde: '≿',
SuchThat: '∋',
Sum: '∑',
Sup: '⋑',
Superset: '⊃',
SupersetEqual: '⊇',
Supset: '⋑',
THORN: 'Þ',
TRADE: '™',
TSHcy: 'Ћ',
TScy: 'Ц',
Tab: '\t',
Tau: 'Τ',
Tcaron: 'Ť',
Tcedil: 'Ţ',
Tcy: 'Т',
Tfr: '𝔗',
Therefore: '∴',
Theta: 'Θ',
ThickSpace: ' ',
ThinSpace: ' ',
Tilde: '∼',
TildeEqual: '≃',
TildeFullEqual: '≅',
TildeTilde: '≈',
Topf: '𝕋',
TripleDot: '⃛',
Tscr: '𝒯',
Tstrok: 'Ŧ',
Uacute: 'Ú',
Uarr: '↟',
Uarrocir: '⥉',
Ubrcy: 'Ў',
Ubreve: 'Ŭ',
Ucirc: 'Û',
Ucy: 'У',
Udblac: 'Ű',
Ufr: '𝔘',
Ugrave: 'Ù',
Umacr: 'Ū',
UnderBar: '_',
UnderBrace: '⏟',
UnderBracket: '⎵',
UnderParenthesis: '⏝',
Union: '⋃',
UnionPlus: '⊎',
Uogon: 'Ų',
Uopf: '𝕌',
UpArrow: '↑',
UpArrowBar: '⤒',
UpArrowDownArrow: '⇅',
UpDownArrow: '↕',
UpEquilibrium: '⥮',
UpTee: '⊥',
UpTeeArrow: '↥',
Uparrow: '⇑',
Updownarrow: '⇕',
UpperLeftArrow: '↖',
UpperRightArrow: '↗',
Upsi: 'ϒ',
Upsilon: 'Υ',
Uring: 'Ů',
Uscr: '𝒰',
Utilde: 'Ũ',
Uuml: 'Ü',
VDash: '⊫',
Vbar: '⫫',
Vcy: 'В',
Vdash: '⊩',
Vdashl: '⫦',
Vee: '⋁',
Verbar: '‖',
Vert: '‖',
VerticalBar: '∣',
VerticalLine: '|',
VerticalSeparator: '❘',
VerticalTilde: '≀',
VeryThinSpace: ' ',
Vfr: '𝔙',
Vopf: '𝕍',
Vscr: '𝒱',
Vvdash: '⊪',
Wcirc: 'Ŵ',
Wedge: '⋀',
Wfr: '𝔚',
Wopf: '𝕎',
Wscr: '𝒲',
Xfr: '𝔛',
Xi: 'Ξ',
Xopf: '𝕏',
Xscr: '𝒳',
YAcy: 'Я',
YIcy: 'Ї',
YUcy: 'Ю',
Yacute: 'Ý',
Ycirc: 'Ŷ',
Ycy: 'Ы',
Yfr: '𝔜',
Yopf: '𝕐',
Yscr: '𝒴',
Yuml: 'Ÿ',
ZHcy: 'Ж',
Zacute: 'Ź',
Zcaron: 'Ž',
Zcy: 'З',
Zdot: 'Ż',
ZeroWidthSpace: '',
Zeta: 'Ζ',
Zfr: 'ℨ',
Zopf: 'ℤ',
Zscr: '𝒵',
aacute: 'á',
abreve: 'ă',
ac: '∾',
acE: '∾̳',
acd: '∿',
acirc: 'â',
acute: '´',
acy: 'а',
aelig: 'æ',
af: '',
afr: '𝔞',
agrave: 'à',
alefsym: 'ℵ',
aleph: 'ℵ',
alpha: 'α',
amacr: 'ā',
amalg: '⨿',
amp: '&',
and: '∧',
andand: '⩕',
andd: '⩜',
andslope: '⩘',
andv: '⩚',
ang: '∠',
ange: '⦤',
angle: '∠',
angmsd: '∡',
angmsdaa: '⦨',
angmsdab: '⦩',
angmsdac: '⦪',
angmsdad: '⦫',
angmsdae: '⦬',
angmsdaf: '⦭',
angmsdag: '⦮',
angmsdah: '⦯',
angrt: '∟',
angrtvb: '⊾',
angrtvbd: '⦝',
angsph: '∢',
angst: 'Å',
angzarr: '⍼',
aogon: 'ą',
aopf: '𝕒',
ap: '≈',
apE: '⩰',
apacir: '⩯',
ape: '≊',
apid: '≋',
apos: "'",
approx: '≈',
approxeq: '≊',
aring: 'å',
ascr: '𝒶',
ast: '*',
asymp: '≈',
asympeq: '≍',
atilde: 'ã',
auml: 'ä',
awconint: '∳',
awint: '⨑',
bNot: '⫭',
backcong: '≌',
backepsilon: '϶',
backprime: '‵',
backsim: '∽',
backsimeq: '⋍',
barvee: '⊽',
barwed: '⌅',
barwedge: '⌅',
bbrk: '⎵',
bbrktbrk: '⎶',
bcong: '≌',
bcy: 'б',
bdquo: '„',
becaus: '∵',
because: '∵',
bemptyv: '⦰',
bepsi: '϶',
bernou: 'ℬ',
beta: 'β',
beth: 'ℶ',
between: '≬',
bfr: '𝔟',
bigcap: '⋂',
bigcirc: '◯',
bigcup: '⋃',
bigodot: '⨀',
bigoplus: '⨁',
bigotimes: '⨂',
bigsqcup: '⨆',
bigstar: '★',
bigtriangledown: '▽',
bigtriangleup: '△',
biguplus: '⨄',
bigvee: '⋁',
bigwedge: '⋀',
bkarow: '⤍',
blacklozenge: '⧫',
blacksquare: '▪',
blacktriangle: '▴',
blacktriangledown: '▾',
blacktriangleleft: '◂',
blacktriangleright: '▸',
blank: '␣',
blk12: '▒',
blk14: '░',
blk34: '▓',
block: '█',
bne: '=⃥',
bnequiv: '≡⃥',
bnot: '⌐',
bopf: '𝕓',
bot: '⊥',
bottom: '⊥',
bowtie: '⋈',
boxDL: '╗',
boxDR: '╔',
boxDl: '╖',
boxDr: '╓',
boxH: '═',
boxHD: '╦',
boxHU: '╩',
boxHd: '╤',
boxHu: '╧',
boxUL: '╝',
boxUR: '╚',
boxUl: '╜',
boxUr: '╙',
boxV: '║',
boxVH: '╬',
boxVL: '╣',
boxVR: '╠',
boxVh: '╫',
boxVl: '╢',
boxVr: '╟',
boxbox: '⧉',
boxdL: '╕',
boxdR: '╒',
boxdl: '┐',
boxdr: '┌',
boxh: '─',
boxhD: '╥',
boxhU: '╨',
boxhd: '┬',
boxhu: '┴',
boxminus: '⊟',
boxplus: '⊞',
boxtimes: '⊠',
boxuL: '╛',
boxuR: '╘',
boxul: '┘',
boxur: '└',
boxv: '│',
boxvH: '╪',
boxvL: '╡',
boxvR: '╞',
boxvh: '┼',
boxvl: '┤',
boxvr: '├',
bprime: '‵',
breve: '˘',
brvbar: '¦',
bscr: '𝒷',
bsemi: '⁏',
bsim: '∽',
bsime: '⋍',
bsol: '\\',
bsolb: '⧅',
bsolhsub: '⟈',
bull: '•',
bullet: '•',
bump: '≎',
bumpE: '⪮',
bumpe: '≏',
bumpeq: '≏',
cacute: 'ć',
cap: '∩',
capand: '⩄',
capbrcup: '⩉',
capcap: '⩋',
capcup: '⩇',
capdot: '⩀',
caps: '∩︀',
caret: '⁁',
caron: 'ˇ',
ccaps: '⩍',
ccaron: 'č',
ccedil: 'ç',
ccirc: 'ĉ',
ccups: '⩌',
ccupssm: '⩐',
cdot: 'ċ',
cedil: '¸',
cemptyv: '⦲',
cent: '¢',
centerdot: '·',
cfr: '𝔠',
chcy: 'ч',
check: '✓',
checkmark: '✓',
chi: 'χ',
cir: '○',
cirE: '⧃',
circ: 'ˆ',
circeq: '≗',
circlearrowleft: '↺',
circlearrowright: '↻',
circledR: '®',
circledS: 'Ⓢ',
circledast: '⊛',
circledcirc: '⊚',
circleddash: '⊝',
cire: '≗',
cirfnint: '⨐',
cirmid: '⫯',
cirscir: '⧂',
clubs: '♣',
clubsuit: '♣',
colon: ':',
colone: '≔',
coloneq: '≔',
comma: ',',
commat: '@',
comp: '∁',
compfn: '∘',
complement: '∁',
complexes: 'ℂ',
cong: '≅',
congdot: '⩭',
conint: '∮',
copf: '𝕔',
coprod: '∐',
copy: '©',
copysr: '℗',
crarr: '↵',
cross: '✗',
cscr: '𝒸',
csub: '⫏',
csube: '⫑',
csup: '⫐',
csupe: '⫒',
ctdot: '⋯',
cudarrl: '⤸',
cudarrr: '⤵',
cuepr: '⋞',
cuesc: '⋟',
cularr: '↶',
cularrp: '⤽',
cup: '∪',
cupbrcap: '⩈',
cupcap: '⩆',
cupcup: '⩊',
cupdot: '⊍',
cupor: '⩅',
cups: '∪︀',
curarr: '↷',
curarrm: '⤼',
curlyeqprec: '⋞',
curlyeqsucc: '⋟',
curlyvee: '⋎',
curlywedge: '⋏',
curren: '¤',
curvearrowleft: '↶',
curvearrowright: '↷',
cuvee: '⋎',
cuwed: '⋏',
cwconint: '∲',
cwint: '∱',
cylcty: '⌭',
dArr: '⇓',
dHar: '⥥',
dagger: '†',
daleth: 'ℸ',
darr: '↓',
dash: '‐',
dashv: '⊣',
dbkarow: '⤏',
dblac: '˝',
dcaron: 'ď',
dcy: 'д',
dd: 'ⅆ',
ddagger: '‡',
ddarr: '⇊',
ddotseq: '⩷',
deg: '°',
delta: 'δ',
demptyv: '⦱',
dfisht: '⥿',
dfr: '𝔡',
dharl: '⇃',
dharr: '⇂',
diam: '⋄',
diamond: '⋄',
diamondsuit: '♦',
diams: '♦',
die: '¨',
digamma: 'ϝ',
disin: '⋲',
div: '÷',
divide: '÷',
divideontimes: '⋇',
divonx: '⋇',
djcy: 'ђ',
dlcorn: '⌞',
dlcrop: '⌍',
dollar: '$',
dopf: '𝕕',
dot: '˙',
doteq: '≐',
doteqdot: '≑',
dotminus: '∸',
dotplus: '∔',
dotsquare: '⊡',
doublebarwedge: '⌆',
downarrow: '↓',
downdownarrows: '⇊',
downharpoonleft: '⇃',
downharpoonright: '⇂',
drbkarow: '⤐',
drcorn: '⌟',
drcrop: '⌌',
dscr: '𝒹',
dscy: 'ѕ',
dsol: '⧶',
dstrok: 'đ',
dtdot: '⋱',
dtri: '▿',
dtrif: '▾',
duarr: '⇵',
duhar: '⥯',
dwangle: '⦦',
dzcy: 'џ',
dzigrarr: '⟿',
eDDot: '⩷',
eDot: '≑',
eacute: 'é',
easter: '⩮',
ecaron: 'ě',
ecir: '≖',
ecirc: 'ê',
ecolon: '≕',
ecy: 'э',
edot: 'ė',
ee: 'ⅇ',
efDot: '≒',
efr: '𝔢',
eg: '⪚',
egrave: 'è',
egs: '⪖',
egsdot: '⪘',
el: '⪙',
elinters: '⏧',
ell: 'ℓ',
els: '⪕',
elsdot: '⪗',
emacr: 'ē',
empty: '∅',
emptyset: '∅',
emptyv: '∅',
emsp13: ' ',
emsp14: ' ',
emsp: ' ',
eng: 'ŋ',
ensp: ' ',
eogon: 'ę',
eopf: '𝕖',
epar: '⋕',
eparsl: '⧣',
eplus: '⩱',
epsi: 'ε',
epsilon: 'ε',
epsiv: 'ϵ',
eqcirc: '≖',
eqcolon: '≕',
eqsim: '≂',
eqslantgtr: '⪖',
eqslantless: '⪕',
equals: '=',
equest: '≟',
equiv: '≡',
equivDD: '⩸',
eqvparsl: '⧥',
erDot: '≓',
erarr: '⥱',
escr: 'ℯ',
esdot: '≐',
esim: '≂',
eta: 'η',
eth: 'ð',
euml: 'ë',
euro: '€',
excl: '!',
exist: '∃',
expectation: 'ℰ',
exponentiale: 'ⅇ',
fallingdotseq: '≒',
fcy: 'ф',
female: '♀',
ffilig: 'ffi',
fflig: 'ff',
ffllig: 'ffl',
ffr: '𝔣',
filig: 'fi',
fjlig: 'fj',
flat: '♭',
fllig: 'fl',
fltns: '▱',
fnof: 'ƒ',
fopf: '𝕗',
forall: '∀',
fork: '⋔',
forkv: '⫙',
fpartint: '⨍',
frac12: '½',
frac13: '⅓',
frac14: '¼',
frac15: '⅕',
frac16: '⅙',
frac18: '⅛',
frac23: '⅔',
frac25: '⅖',
frac34: '¾',
frac35: '⅗',
frac38: '⅜',
frac45: '⅘',
frac56: '⅚',
frac58: '⅝',
frac78: '⅞',
frasl: '⁄',
frown: '⌢',
fscr: '𝒻',
gE: '≧',
gEl: '⪌',
gacute: 'ǵ',
gamma: 'γ',
gammad: 'ϝ',
gap: '⪆',
gbreve: 'ğ',
gcirc: 'ĝ',
gcy: 'г',
gdot: 'ġ',
ge: '≥',
gel: '⋛',
geq: '≥',
geqq: '≧',
geqslant: '⩾',
ges: '⩾',
gescc: '⪩',
gesdot: '⪀',
gesdoto: '⪂',
gesdotol: '⪄',
gesl: '⋛︀',
gesles: '⪔',
gfr: '𝔤',
gg: '≫',
ggg: '⋙',
gimel: 'ℷ',
gjcy: 'ѓ',
gl: '≷',
glE: '⪒',
gla: '⪥',
glj: '⪤',
gnE: '≩',
gnap: '⪊',
gnapprox: '⪊',
gne: '⪈',
gneq: '⪈',
gneqq: '≩',
gnsim: '⋧',
gopf: '𝕘',
grave: '`',
gscr: 'ℊ',
gsim: '≳',
gsime: '⪎',
gsiml: '⪐',
gt: '>',
gtcc: '⪧',
gtcir: '⩺',
gtdot: '⋗',
gtlPar: '⦕',
gtquest: '⩼',
gtrapprox: '⪆',
gtrarr: '⥸',
gtrdot: '⋗',
gtreqless: '⋛',
gtreqqless: '⪌',
gtrless: '≷',
gtrsim: '≳',
gvertneqq: '≩︀',
gvnE: '≩︀',
hArr: '⇔',
hairsp: ' ',
half: '½',
hamilt: 'ℋ',
hardcy: 'ъ',
harr: '↔',
harrcir: '⥈',
harrw: '↭',
hbar: 'ℏ',
hcirc: 'ĥ',
hearts: '♥',
heartsuit: '♥',
hellip: '…',
hercon: '⊹',
hfr: '𝔥',
hksearow: '⤥',
hkswarow: '⤦',
hoarr: '⇿',
homtht: '∻',
hookleftarrow: '↩',
hookrightarrow: '↪',
hopf: '𝕙',
horbar: '―',
hscr: '𝒽',
hslash: 'ℏ',
hstrok: 'ħ',
hybull: '⁃',
hyphen: '‐',
iacute: 'í',
ic: '',
icirc: 'î',
icy: 'и',
iecy: 'е',
iexcl: '¡',
iff: '⇔',
ifr: '𝔦',
igrave: 'ì',
ii: 'ⅈ',
iiiint: '⨌',
iiint: '∭',
iinfin: '⧜',
iiota: '℩',
ijlig: 'ij',
imacr: 'ī',
image: 'ℑ',
imagline: 'ℐ',
imagpart: 'ℑ',
imath: 'ı',
imof: '⊷',
imped: 'Ƶ',
in: '∈',
incare: '℅',
infin: '∞',
infintie: '⧝',
inodot: 'ı',
int: '∫',
intcal: '⊺',
integers: 'ℤ',
intercal: '⊺',
intlarhk: '⨗',
intprod: '⨼',
iocy: 'ё',
iogon: 'į',
iopf: '𝕚',
iota: 'ι',
iprod: '⨼',
iquest: '¿',
iscr: '𝒾',
isin: '∈',
isinE: '⋹',
isindot: '⋵',
isins: '⋴',
isinsv: '⋳',
isinv: '∈',
it: '',
itilde: 'ĩ',
iukcy: 'і',
iuml: 'ï',
jcirc: 'ĵ',
jcy: 'й',
jfr: '𝔧',
jmath: 'ȷ',
jopf: '𝕛',
jscr: '𝒿',
jsercy: 'ј',
jukcy: 'є',
kappa: 'κ',
kappav: 'ϰ',
kcedil: 'ķ',
kcy: 'к',
kfr: '𝔨',
kgreen: 'ĸ',
khcy: 'х',
kjcy: 'ќ',
kopf: '𝕜',
kscr: '𝓀',
lAarr: '⇚',
lArr: '⇐',
lAtail: '⤛',
lBarr: '⤎',
lE: '≦',
lEg: '⪋',
lHar: '⥢',
lacute: 'ĺ',
laemptyv: '⦴',
lagran: 'ℒ',
lambda: 'λ',
lang: '⟨',
langd: '⦑',
langle: '⟨',
lap: '⪅',
laquo: '«',
larr: '←',
larrb: '⇤',
larrbfs: '⤟',
larrfs: '⤝',
larrhk: '↩',
larrlp: '↫',
larrpl: '⤹',
larrsim: '⥳',
larrtl: '↢',
lat: '⪫',
latail: '⤙',
late: '⪭',
lates: '⪭︀',
lbarr: '⤌',
lbbrk: '❲',
lbrace: '{',
lbrack: '[',
lbrke: '⦋',
lbrksld: '⦏',
lbrkslu: '⦍',
lcaron: 'ľ',
lcedil: 'ļ',
lceil: '⌈',
lcub: '{',
lcy: 'л',
ldca: '⤶',
ldquo: '“',
ldquor: '„',
ldrdhar: '⥧',
ldrushar: '⥋',
ldsh: '↲',
le: '≤',
leftarrow: '←',
leftarrowtail: '↢',
leftharpoondown: '↽',
leftharpoonup: '↼',
leftleftarrows: '⇇',
leftrightarrow: '↔',
leftrightarrows: '⇆',
leftrightharpoons: '⇋',
leftrightsquigarrow: '↭',
leftthreetimes: '⋋',
leg: '⋚',
leq: '≤',
leqq: '≦',
leqslant: '⩽',
les: '⩽',
lescc: '⪨',
lesdot: '⩿',
lesdoto: '⪁',
lesdotor: '⪃',
lesg: '⋚︀',
lesges: '⪓',
lessapprox: '⪅',
lessdot: '⋖',
lesseqgtr: '⋚',
lesseqqgtr: '⪋',
lessgtr: '≶',
lesssim: '≲',
lfisht: '⥼',
lfloor: '⌊',
lfr: '𝔩',
lg: '≶',
lgE: '⪑',
lhard: '↽',
lharu: '↼',
lharul: '⥪',
lhblk: '▄',
ljcy: 'љ',
ll: '≪',
llarr: '⇇',
llcorner: '⌞',
llhard: '⥫',
lltri: '◺',
lmidot: 'ŀ',
lmoust: '⎰',
lmoustache: '⎰',
lnE: '≨',
lnap: '⪉',
lnapprox: '⪉',
lne: '⪇',
lneq: '⪇',
lneqq: '≨',
lnsim: '⋦',
loang: '⟬',
loarr: '⇽',
lobrk: '⟦',
longleftarrow: '⟵',
longleftrightarrow: '⟷',
longmapsto: '⟼',
longrightarrow: '⟶',
looparrowleft: '↫',
looparrowright: '↬',
lopar: '⦅',
lopf: '𝕝',
loplus: '⨭',
lotimes: '⨴',
lowast: '∗',
lowbar: '_',
loz: '◊',
lozenge: '◊',
lozf: '⧫',
lpar: '(',
lparlt: '⦓',
lrarr: '⇆',
lrcorner: '⌟',
lrhar: '⇋',
lrhard: '⥭',
lrm: '',
lrtri: '⊿',
lsaquo: '‹',
lscr: '𝓁',
lsh: '↰',
lsim: '≲',
lsime: '⪍',
lsimg: '⪏',
lsqb: '[',
lsquo: '‘',
lsquor: '‚',
lstrok: 'ł',
lt: '<',
ltcc: '⪦',
ltcir: '⩹',
ltdot: '⋖',
lthree: '⋋',
ltimes: '⋉',
ltlarr: '⥶',
ltquest: '⩻',
ltrPar: '⦖',
ltri: '◃',
ltrie: '⊴',
ltrif: '◂',
lurdshar: '⥊',
luruhar: '⥦',
lvertneqq: '≨︀',
lvnE: '≨︀',
mDDot: '∺',
macr: '¯',
male: '♂',
malt: '✠',
maltese: '✠',
map: '↦',
mapsto: '↦',
mapstodown: '↧',
mapstoleft: '↤',
mapstoup: '↥',
marker: '▮',
mcomma: '⨩',
mcy: 'м',
mdash: '—',
measuredangle: '∡',
mfr: '𝔪',
mho: '℧',
micro: 'µ',
mid: '∣',
midast: '*',
midcir: '⫰',
middot: '·',
minus: '−',
minusb: '⊟',
minusd: '∸',
minusdu: '⨪',
mlcp: '⫛',
mldr: '…',
mnplus: '∓',
models: '⊧',
mopf: '𝕞',
mp: '∓',
mscr: '𝓂',
mstpos: '∾',
mu: 'μ',
multimap: '⊸',
mumap: '⊸',
nGg: '⋙̸',
nGt: '≫⃒',
nGtv: '≫̸',
nLeftarrow: '⇍',
nLeftrightarrow: '⇎',
nLl: '⋘̸',
nLt: '≪⃒',
nLtv: '≪̸',
nRightarrow: '⇏',
nVDash: '⊯',
nVdash: '⊮',
nabla: '∇',
nacute: 'ń',
nang: '∠⃒',
nap: '≉',
napE: '⩰̸',
napid: '≋̸',
napos: 'ʼn',
napprox: '≉',
natur: '♮',
natural: '♮',
naturals: 'ℕ',
nbsp: ' ',
nbump: '≎̸',
nbumpe: '≏̸',
ncap: '⩃',
ncaron: 'ň',
ncedil: 'ņ',
ncong: '≇',
ncongdot: '⩭̸',
ncup: '⩂',
ncy: 'н',
ndash: '–',
ne: '≠',
neArr: '⇗',
nearhk: '⤤',
nearr: '↗',
nearrow: '↗',
nedot: '≐̸',
nequiv: '≢',
nesear: '⤨',
nesim: '≂̸',
nexist: '∄',
nexists: '∄',
nfr: '𝔫',
ngE: '≧̸',
nge: '≱',
ngeq: '≱',
ngeqq: '≧̸',
ngeqslant: '⩾̸',
nges: '⩾̸',
ngsim: '≵',
ngt: '≯',
ngtr: '≯',
nhArr: '⇎',
nharr: '↮',
nhpar: '⫲',
ni: '∋',
nis: '⋼',
nisd: '⋺',
niv: '∋',
njcy: 'њ',
nlArr: '⇍',
nlE: '≦̸',
nlarr: '↚',
nldr: '‥',
nle: '≰',
nleftarrow: '↚',
nleftrightarrow: '↮',
nleq: '≰',
nleqq: '≦̸',
nleqslant: '⩽̸',
nles: '⩽̸',
nless: '≮',
nlsim: '≴',
nlt: '≮',
nltri: '⋪',
nltrie: '⋬',
nmid: '∤',
nopf: '𝕟',
not: '¬',
notin: '∉',
notinE: '⋹̸',
notindot: '⋵̸',
notinva: '∉',
notinvb: '⋷',
notinvc: '⋶',
notni: '∌',
notniva: '∌',
notnivb: '⋾',
notnivc: '⋽',
npar: '∦',
nparallel: '∦',
nparsl: '⫽⃥',
npart: '∂̸',
npolint: '⨔',
npr: '⊀',
nprcue: '⋠',
npre: '⪯̸',
nprec: '⊀',
npreceq: '⪯̸',
nrArr: '⇏',
nrarr: '↛',
nrarrc: '⤳̸',
nrarrw: '↝̸',
nrightarrow: '↛',
nrtri: '⋫',
nrtrie: '⋭',
nsc: '⊁',
nsccue: '⋡',
nsce: '⪰̸',
nscr: '𝓃',
nshortmid: '∤',
nshortparallel: '∦',
nsim: '≁',
nsime: '≄',
nsimeq: '≄',
nsmid: '∤',
nspar: '∦',
nsqsube: '⋢',
nsqsupe: '⋣',
nsub: '⊄',
nsubE: '⫅̸',
nsube: '⊈',
nsubset: '⊂⃒',
nsubseteq: '⊈',
nsubseteqq: '⫅̸',
nsucc: '⊁',
nsucceq: '⪰̸',
nsup: '⊅',
nsupE: '⫆̸',
nsupe: '⊉',
nsupset: '⊃⃒',
nsupseteq: '⊉',
nsupseteqq: '⫆̸',
ntgl: '≹',
ntilde: 'ñ',
ntlg: '≸',
ntriangleleft: '⋪',
ntrianglelefteq: '⋬',
ntriangleright: '⋫',
ntrianglerighteq: '⋭',
nu: 'ν',
num: '#',
numero: '№',
numsp: ' ',
nvDash: '⊭',
nvHarr: '⤄',
nvap: '≍⃒',
nvdash: '⊬',
nvge: '≥⃒',
nvgt: '>⃒',
nvinfin: '⧞',
nvlArr: '⤂',
nvle: '≤⃒',
nvlt: '<⃒',
nvltrie: '⊴⃒',
nvrArr: '⤃',
nvrtrie: '⊵⃒',
nvsim: '∼⃒',
nwArr: '⇖',
nwarhk: '⤣',
nwarr: '↖',
nwarrow: '↖',
nwnear: '⤧',
oS: 'Ⓢ',
oacute: 'ó',
oast: '⊛',
ocir: '⊚',
ocirc: 'ô',
ocy: 'о',
odash: '⊝',
odblac: 'ő',
odiv: '⨸',
odot: '⊙',
odsold: '⦼',
oelig: 'œ',
ofcir: '⦿',
ofr: '𝔬',
ogon: '˛',
ograve: 'ò',
ogt: '⧁',
ohbar: '⦵',
ohm: 'Ω',
oint: '∮',
olarr: '↺',
olcir: '⦾',
olcross: '⦻',
oline: '‾',
olt: '⧀',
omacr: 'ō',
omega: 'ω',
omicron: 'ο',
omid: '⦶',
ominus: '⊖',
oopf: '𝕠',
opar: '⦷',
operp: '⦹',
oplus: '⊕',
or: '∨',
orarr: '↻',
ord: '⩝',
order: 'ℴ',
orderof: 'ℴ',
ordf: 'ª',
ordm: 'º',
origof: '⊶',
oror: '⩖',
orslope: '⩗',
orv: '⩛',
oscr: 'ℴ',
oslash: 'ø',
osol: '⊘',
otilde: 'õ',
otimes: '⊗',
otimesas: '⨶',
ouml: 'ö',
ovbar: '⌽',
par: '∥',
para: '¶',
parallel: '∥',
parsim: '⫳',
parsl: '⫽',
part: '∂',
pcy: 'п',
percnt: '%',
period: '.',
permil: '‰',
perp: '⊥',
pertenk: '‱',
pfr: '𝔭',
phi: 'φ',
phiv: 'ϕ',
phmmat: 'ℳ',
phone: '☎',
pi: 'π',
pitchfork: '⋔',
piv: 'ϖ',
planck: 'ℏ',
planckh: 'ℎ',
plankv: 'ℏ',
plus: '+',
plusacir: '⨣',
plusb: '⊞',
pluscir: '⨢',
plusdo: '∔',
plusdu: '⨥',
pluse: '⩲',
plusmn: '±',
plussim: '⨦',
plustwo: '⨧',
pm: '±',
pointint: '⨕',
popf: '𝕡',
pound: '£',
pr: '≺',
prE: '⪳',
prap: '⪷',
prcue: '≼',
pre: '⪯',
prec: '≺',
precapprox: '⪷',
preccurlyeq: '≼',
preceq: '⪯',
precnapprox: '⪹',
precneqq: '⪵',
precnsim: '⋨',
precsim: '≾',
prime: '′',
primes: 'ℙ',
prnE: '⪵',
prnap: '⪹',
prnsim: '⋨',
prod: '∏',
profalar: '⌮',
profline: '⌒',
profsurf: '⌓',
prop: '∝',
propto: '∝',
prsim: '≾',
prurel: '⊰',
pscr: '𝓅',
psi: 'ψ',
puncsp: ' ',
qfr: '𝔮',
qint: '⨌',
qopf: '𝕢',
qprime: '⁗',
qscr: '𝓆',
quaternions: 'ℍ',
quatint: '⨖',
quest: '?',
questeq: '≟',
quot: '"',
rAarr: '⇛',
rArr: '⇒',
rAtail: '⤜',
rBarr: '⤏',
rHar: '⥤',
race: '∽̱',
racute: 'ŕ',
radic: '√',
raemptyv: '⦳',
rang: '⟩',
rangd: '⦒',
range: '⦥',
rangle: '⟩',
raquo: '»',
rarr: '→',
rarrap: '⥵',
rarrb: '⇥',
rarrbfs: '⤠',
rarrc: '⤳',
rarrfs: '⤞',
rarrhk: '↪',
rarrlp: '↬',
rarrpl: '⥅',
rarrsim: '⥴',
rarrtl: '↣',
rarrw: '↝',
ratail: '⤚',
ratio: '∶',
rationals: 'ℚ',
rbarr: '⤍',
rbbrk: '❳',
rbrace: '}',
rbrack: ']',
rbrke: '⦌',
rbrksld: '⦎',
rbrkslu: '⦐',
rcaron: 'ř',
rcedil: 'ŗ',
rceil: '⌉',
rcub: '}',
rcy: 'р',
rdca: '⤷',
rdldhar: '⥩',
rdquo: '”',
rdquor: '”',
rdsh: '↳',
real: 'ℜ',
realine: 'ℛ',
realpart: 'ℜ',
reals: 'ℝ',
rect: '▭',
reg: '®',
rfisht: '⥽',
rfloor: '⌋',
rfr: '𝔯',
rhard: '⇁',
rharu: '⇀',
rharul: '⥬',
rho: 'ρ',
rhov: 'ϱ',
rightarrow: '→',
rightarrowtail: '↣',
rightharpoondown: '⇁',
rightharpoonup: '⇀',
rightleftarrows: '⇄',
rightleftharpoons: '⇌',
rightrightarrows: '⇉',
rightsquigarrow: '↝',
rightthreetimes: '⋌',
ring: '˚',
risingdotseq: '≓',
rlarr: '⇄',
rlhar: '⇌',
rlm: '',
rmoust: '⎱',
rmoustache: '⎱',
rnmid: '⫮',
roang: '⟭',
roarr: '⇾',
robrk: '⟧',
ropar: '⦆',
ropf: '𝕣',
roplus: '⨮',
rotimes: '⨵',
rpar: ')',
rpargt: '⦔',
rppolint: '⨒',
rrarr: '⇉',
rsaquo: '›',
rscr: '𝓇',
rsh: '↱',
rsqb: ']',
rsquo: '’',
rsquor: '’',
rthree: '⋌',
rtimes: '⋊',
rtri: '▹',
rtrie: '⊵',
rtrif: '▸',
rtriltri: '⧎',
ruluhar: '⥨',
rx: '℞',
sacute: 'ś',
sbquo: '‚',
sc: '≻',
scE: '⪴',
scap: '⪸',
scaron: 'š',
sccue: '≽',
sce: '⪰',
scedil: 'ş',
scirc: 'ŝ',
scnE: '⪶',
scnap: '⪺',
scnsim: '⋩',
scpolint: '⨓',
scsim: '≿',
scy: 'с',
sdot: '⋅',
sdotb: '⊡',
sdote: '⩦',
seArr: '⇘',
searhk: '⤥',
searr: '↘',
searrow: '↘',
sect: '§',
semi: ';',
seswar: '⤩',
setminus: '∖',
setmn: '∖',
sext: '✶',
sfr: '𝔰',
sfrown: '⌢',
sharp: '♯',
shchcy: 'щ',
shcy: 'ш',
shortmid: '∣',
shortparallel: '∥',
shy: '',
sigma: 'σ',
sigmaf: 'ς',
sigmav: 'ς',
sim: '∼',
simdot: '⩪',
sime: '≃',
simeq: '≃',
simg: '⪞',
simgE: '⪠',
siml: '⪝',
simlE: '⪟',
simne: '≆',
simplus: '⨤',
simrarr: '⥲',
slarr: '←',
smallsetminus: '∖',
smashp: '⨳',
smeparsl: '⧤',
smid: '∣',
smile: '⌣',
smt: '⪪',
smte: '⪬',
smtes: '⪬︀',
softcy: 'ь',
sol: '/',
solb: '⧄',
solbar: '⌿',
sopf: '𝕤',
spades: '♠',
spadesuit: '♠',
spar: '∥',
sqcap: '⊓',
sqcaps: '⊓︀',
sqcup: '⊔',
sqcups: '⊔︀',
sqsub: '⊏',
sqsube: '⊑',
sqsubset: '⊏',
sqsubseteq: '⊑',
sqsup: '⊐',
sqsupe: '⊒',
sqsupset: '⊐',
sqsupseteq: '⊒',
squ: '□',
square: '□',
squarf: '▪',
squf: '▪',
srarr: '→',
sscr: '𝓈',
ssetmn: '∖',
ssmile: '⌣',
sstarf: '⋆',
star: '☆',
starf: '★',
straightepsilon: 'ϵ',
straightphi: 'ϕ',
strns: '¯',
sub: '⊂',
subE: '⫅',
subdot: '⪽',
sube: '⊆',
subedot: '⫃',
submult: '⫁',
subnE: '⫋',
subne: '⊊',
subplus: '⪿',
subrarr: '⥹',
subset: '⊂',
subseteq: '⊆',
subseteqq: '⫅',
subsetneq: '⊊',
subsetneqq: '⫋',
subsim: '⫇',
subsub: '⫕',
subsup: '⫓',
succ: '≻',
succapprox: '⪸',
succcurlyeq: '≽',
succeq: '⪰',
succnapprox: '⪺',
succneqq: '⪶',
succnsim: '⋩',
succsim: '≿',
sum: '∑',
sung: '♪',
sup1: '¹',
sup2: '²',
sup3: '³',
sup: '⊃',
supE: '⫆',
supdot: '⪾',
supdsub: '⫘',
supe: '⊇',
supedot: '⫄',
suphsol: '⟉',
suphsub: '⫗',
suplarr: '⥻',
supmult: '⫂',
supnE: '⫌',
supne: '⊋',
supplus: '⫀',
supset: '⊃',
supseteq: '⊇',
supseteqq: '⫆',
supsetneq: '⊋',
supsetneqq: '⫌',
supsim: '⫈',
supsub: '⫔',
supsup: '⫖',
swArr: '⇙',
swarhk: '⤦',
swarr: '↙',
swarrow: '↙',
swnwar: '⤪',
szlig: 'ß',
target: '⌖',
tau: 'τ',
tbrk: '⎴',
tcaron: 'ť',
tcedil: 'ţ',
tcy: 'т',
tdot: '⃛',
telrec: '⌕',
tfr: '𝔱',
there4: '∴',
therefore: '∴',
theta: 'θ',
thetasym: 'ϑ',
thetav: 'ϑ',
thickapprox: '≈',
thicksim: '∼',
thinsp: ' ',
thkap: '≈',
thksim: '∼',
thorn: 'þ',
tilde: '˜',
times: '×',
timesb: '⊠',
timesbar: '⨱',
timesd: '⨰',
tint: '∭',
toea: '⤨',
top: '⊤',
topbot: '⌶',
topcir: '⫱',
topf: '𝕥',
topfork: '⫚',
tosa: '⤩',
tprime: '‴',
trade: '™',
triangle: '▵',
triangledown: '▿',
triangleleft: '◃',
trianglelefteq: '⊴',
triangleq: '≜',
triangleright: '▹',
trianglerighteq: '⊵',
tridot: '◬',
trie: '≜',
triminus: '⨺',
triplus: '⨹',
trisb: '⧍',
tritime: '⨻',
trpezium: '⏢',
tscr: '𝓉',
tscy: 'ц',
tshcy: 'ћ',
tstrok: 'ŧ',
twixt: '≬',
twoheadleftarrow: '↞',
twoheadrightarrow: '↠',
uArr: '⇑',
uHar: '⥣',
uacute: 'ú',
uarr: '↑',
ubrcy: 'ў',
ubreve: 'ŭ',
ucirc: 'û',
ucy: 'у',
udarr: '⇅',
udblac: 'ű',
udhar: '⥮',
ufisht: '⥾',
ufr: '𝔲',
ugrave: 'ù',
uharl: '↿',
uharr: '↾',
uhblk: '▀',
ulcorn: '⌜',
ulcorner: '⌜',
ulcrop: '⌏',
ultri: '◸',
umacr: 'ū',
uml: '¨',
uogon: 'ų',
uopf: '𝕦',
uparrow: '↑',
updownarrow: '↕',
upharpoonleft: '↿',
upharpoonright: '↾',
uplus: '⊎',
upsi: 'υ',
upsih: 'ϒ',
upsilon: 'υ',
upuparrows: '⇈',
urcorn: '⌝',
urcorner: '⌝',
urcrop: '⌎',
uring: 'ů',
urtri: '◹',
uscr: '𝓊',
utdot: '⋰',
utilde: 'ũ',
utri: '▵',
utrif: '▴',
uuarr: '⇈',
uuml: 'ü',
uwangle: '⦧',
vArr: '⇕',
vBar: '⫨',
vBarv: '⫩',
vDash: '⊨',
vangrt: '⦜',
varepsilon: 'ϵ',
varkappa: 'ϰ',
varnothing: '∅',
varphi: 'ϕ',
varpi: 'ϖ',
varpropto: '∝',
varr: '↕',
varrho: 'ϱ',
varsigma: 'ς',
varsubsetneq: '⊊︀',
varsubsetneqq: '⫋︀',
varsupsetneq: '⊋︀',
varsupsetneqq: '⫌︀',
vartheta: 'ϑ',
vartriangleleft: '⊲',
vartriangleright: '⊳',
vcy: 'в',
vdash: '⊢',
vee: '∨',
veebar: '⊻',
veeeq: '≚',
vellip: '⋮',
verbar: '|',
vert: '|',
vfr: '𝔳',
vltri: '⊲',
vnsub: '⊂⃒',
vnsup: '⊃⃒',
vopf: '𝕧',
vprop: '∝',
vrtri: '⊳',
vscr: '𝓋',
vsubnE: '⫋︀',
vsubne: '⊊︀',
vsupnE: '⫌︀',
vsupne: '⊋︀',
vzigzag: '⦚',
wcirc: 'ŵ',
wedbar: '⩟',
wedge: '∧',
wedgeq: '≙',
weierp: '℘',
wfr: '𝔴',
wopf: '𝕨',
wp: '℘',
wr: '≀',
wreath: '≀',
wscr: '𝓌',
xcap: '⋂',
xcirc: '◯',
xcup: '⋃',
xdtri: '▽',
xfr: '𝔵',
xhArr: '⟺',
xharr: '⟷',
xi: 'ξ',
xlArr: '⟸',
xlarr: '⟵',
xmap: '⟼',
xnis: '⋻',
xodot: '⨀',
xopf: '𝕩',
xoplus: '⨁',
xotime: '⨂',
xrArr: '⟹',
xrarr: '⟶',
xscr: '𝓍',
xsqcup: '⨆',
xuplus: '⨄',
xutri: '△',
xvee: '⋁',
xwedge: '⋀',
yacute: 'ý',
yacy: 'я',
ycirc: 'ŷ',
ycy: 'ы',
yen: '¥',
yfr: '𝔶',
yicy: 'ї',
yopf: '𝕪',
yscr: '𝓎',
yucy: 'ю',
yuml: 'ÿ',
zacute: 'ź',
zcaron: 'ž',
zcy: 'з',
zdot: 'ż',
zeetrf: 'ℨ',
zeta: 'ζ',
zfr: '𝔷',
zhcy: 'ж',
zigrarr: '⇝',
zopf: '𝕫',
zscr: '𝓏',
zwj: '',
zwnj: ''
}
;// CONCATENATED MODULE: ./node_modules/decode-named-character-reference/index.js
const own = {}.hasOwnProperty
/**
* Decode a single character reference (without the `&` or `;`).
* You probably only need this when you’re building parsers yourself that follow
* different rules compared to HTML.
* This is optimized to be tiny in browsers.
*
* @param {string} value
* `notin` (named), `#123` (deci), `#x123` (hexa).
* @returns {string|false}
* Decoded reference.
*/
function decodeNamedCharacterReference(value) {
return own.call(characterEntities, value) ? characterEntities[value] : false
}
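// Example lookups against the map above (illustrative values only):
//
//   decodeNamedCharacterReference('amp') // => '&'
//   decodeNamedCharacterReference('copy') // => '©'
//   decodeNamedCharacterReference('#x26') // => false (numeric forms are not named)
//   decodeNamedCharacterReference('toString') // => false (own properties only)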
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/character-reference.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const characterReference = {
name: 'characterReference',
tokenize: tokenizeCharacterReference
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCharacterReference(effects, ok, nok) {
const self = this
let size = 0
/** @type {number} */
let max
/** @type {(code: Code) => boolean} */
let test
return start
/**
* Start of character reference.
*
* ```markdown
* > | a&amp;b
* ^
* > | a&#123;b
* ^
* > | a&#x9;b
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('characterReference')
effects.enter('characterReferenceMarker')
effects.consume(code)
effects.exit('characterReferenceMarker')
return open
}
/**
* After `&`, at `#` for numeric references or alphanumeric for named
* references.
*
* ```markdown
* > | a&amp;b
* ^
* > | a&#123;b
* ^
* > | a&#x9;b
* ^
* ```
*
* @type {State}
*/
function open(code) {
if (code === 35) {
effects.enter('characterReferenceMarkerNumeric')
effects.consume(code)
effects.exit('characterReferenceMarkerNumeric')
return numeric
}
effects.enter('characterReferenceValue')
max = 31
test = asciiAlphanumeric
return value(code)
}
/**
* After `#`, at `x` for hexadecimals or digit for decimals.
*
* ```markdown
* > | a&#123;b
* ^
* > | a&#x9;b
* ^
* ```
*
* @type {State}
*/
function numeric(code) {
if (code === 88 || code === 120) {
effects.enter('characterReferenceMarkerHexadecimal')
effects.consume(code)
effects.exit('characterReferenceMarkerHexadecimal')
effects.enter('characterReferenceValue')
max = 6
test = asciiHexDigit
return value
}
effects.enter('characterReferenceValue')
max = 7
test = asciiDigit
return value(code)
}
/**
* After markers (`&#x`, `&#`, or `&`), in value, before `;`.
*
* The character reference kind defines what and how many characters are
* allowed.
*
* ```markdown
* > | a&amp;b
* ^^^
* > | a&#123;b
* ^^^
* > | a&#x9;b
* ^
* ```
*
* @type {State}
*/
function value(code) {
if (code === 59 && size) {
const token = effects.exit('characterReferenceValue')
if (
test === asciiAlphanumeric &&
!decodeNamedCharacterReference(self.sliceSerialize(token))
) {
return nok(code)
}
// To do: `markdown-rs` uses a different name:
// `CharacterReferenceMarkerSemi`.
effects.enter('characterReferenceMarker')
effects.consume(code)
effects.exit('characterReferenceMarker')
effects.exit('characterReference')
return ok
}
if (test(code) && size++ < max) {
effects.consume(code)
return value
}
return nok(code)
}
}
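// Size limits applied above, for reference: named references allow up to 31
// alphanumeric characters (`CounterClockwiseContourIntegral` is the longest
// name in the map), decimal references up to 7 digits, and hexadecimal
// references up to 6 digits; longer values fail with `nok`.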
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/character-escape.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const characterEscape = {
name: 'characterEscape',
tokenize: tokenizeCharacterEscape
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCharacterEscape(effects, ok, nok) {
return start
/**
* Start of character escape.
*
* ```markdown
* > | a\*b
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('characterEscape')
effects.enter('escapeMarker')
effects.consume(code)
effects.exit('escapeMarker')
return inside
}
/**
* After `\`, at punctuation.
*
* ```markdown
* > | a\*b
* ^
* ```
*
* @type {State}
*/
function inside(code) {
// ASCII punctuation.
if (asciiPunctuation(code)) {
effects.enter('characterEscapeValue')
effects.consume(code)
effects.exit('characterEscapeValue')
effects.exit('characterEscape')
return ok
}
return nok(code)
}
}
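// Illustration: only ASCII punctuation can be escaped, so `a\*b` produces a
// literal asterisk, while `a\qb` fails this construct and the backslash
// remains literal text.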
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/line-ending.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const lineEnding = {
name: 'lineEnding',
tokenize: tokenizeLineEnding
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeLineEnding(effects, ok) {
return start
/** @type {State} */
function start(code) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return factorySpace(effects, ok, 'linePrefix')
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/label-end.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Event} Event
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const labelEnd = {
name: 'labelEnd',
tokenize: tokenizeLabelEnd,
resolveTo: resolveToLabelEnd,
resolveAll: resolveAllLabelEnd
}
/** @type {Construct} */
const resourceConstruct = {
tokenize: tokenizeResource
}
/** @type {Construct} */
const referenceFullConstruct = {
tokenize: tokenizeReferenceFull
}
/** @type {Construct} */
const referenceCollapsedConstruct = {
tokenize: tokenizeReferenceCollapsed
}
/** @type {Resolver} */
function resolveAllLabelEnd(events) {
let index = -1
while (++index < events.length) {
const token = events[index][1]
if (
token.type === 'labelImage' ||
token.type === 'labelLink' ||
token.type === 'labelEnd'
) {
// Remove the marker.
events.splice(index + 1, token.type === 'labelImage' ? 4 : 2)
token.type = 'data'
index++
}
}
return events
}
/** @type {Resolver} */
function resolveToLabelEnd(events, context) {
let index = events.length
let offset = 0
/** @type {Token} */
let token
/** @type {number | undefined} */
let open
/** @type {number | undefined} */
let close
/** @type {Array<Event>} */
let media
// Find an opening.
while (index--) {
token = events[index][1]
if (open) {
// If we see another link, or inactive link label, we’ve been here before.
if (
token.type === 'link' ||
(token.type === 'labelLink' && token._inactive)
) {
break
}
// Mark other link openings as inactive, as we can’t have links in
// links.
if (events[index][0] === 'enter' && token.type === 'labelLink') {
token._inactive = true
}
} else if (close) {
if (
events[index][0] === 'enter' &&
(token.type === 'labelImage' || token.type === 'labelLink') &&
!token._balanced
) {
open = index
if (token.type !== 'labelLink') {
offset = 2
break
}
}
} else if (token.type === 'labelEnd') {
close = index
}
}
const group = {
type: events[open][1].type === 'labelLink' ? 'link' : 'image',
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[events.length - 1][1].end)
}
const label = {
type: 'label',
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[close][1].end)
}
const text = {
type: 'labelText',
start: Object.assign({}, events[open + offset + 2][1].end),
end: Object.assign({}, events[close - 2][1].start)
}
media = [
['enter', group, context],
['enter', label, context]
]
// Opening marker.
media = push(media, events.slice(open + 1, open + offset + 3))
// Text open.
media = push(media, [['enter', text, context]])
// Always populated by defaults.
// Between.
media = push(
media,
resolveAll(
context.parser.constructs.insideSpan.null,
events.slice(open + offset + 4, close - 3),
context
)
)
// Text close, marker close, label close.
media = push(media, [
['exit', text, context],
events[close - 2],
events[close - 1],
['exit', label, context]
])
// Reference, resource, or so.
media = push(media, events.slice(close + 1))
// Media close.
media = push(media, [['exit', group, context]])
splice(events, open, events.length, media)
return events
}
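// Illustrative outcome of the resolver above: for `[a](b)` the events between
// the label start and this label end are regrouped as `link` > `label` >
// `labelText`, followed by the resource events, while for `![a](b)` the outer
// group becomes `image` (the opening token is `labelImage`, so `offset` is 2).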
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeLabelEnd(effects, ok, nok) {
const self = this
let index = self.events.length
/** @type {Token} */
let labelStart
/** @type {boolean} */
let defined
// Find an opening.
while (index--) {
if (
(self.events[index][1].type === 'labelImage' ||
self.events[index][1].type === 'labelLink') &&
!self.events[index][1]._balanced
) {
labelStart = self.events[index][1]
break
}
}
return start
/**
* Start of label end.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ```
*
* @type {State}
*/
function start(code) {
// If there is not an okay opening.
if (!labelStart) {
return nok(code)
}
// If the corresponding label (link) start is marked as inactive,
// it means we’d be wrapping a link, like this:
//
// ```markdown
// > | a [b [c](d) e](f) g.
// ^
// ```
//
// We can’t have that, so it’s just balanced brackets.
if (labelStart._inactive) {
return labelEndNok(code)
}
defined = self.parser.defined.includes(
normalizeIdentifier(
self.sliceSerialize({
start: labelStart.end,
end: self.now()
})
)
)
effects.enter('labelEnd')
effects.enter('labelMarker')
effects.consume(code)
effects.exit('labelMarker')
effects.exit('labelEnd')
return after
}
/**
* After `]`.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function after(code) {
// Note: `markdown-rs` also parses GFM footnotes here, which for us is in
// an extension.
// Resource (`[asd](fgh)`)?
if (code === 40) {
return effects.attempt(
resourceConstruct,
labelEndOk,
defined ? labelEndOk : labelEndNok
)(code)
}
// Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
if (code === 91) {
return effects.attempt(
referenceFullConstruct,
labelEndOk,
defined ? referenceNotFull : labelEndNok
)(code)
}
// Shortcut (`[asd]`) reference?
return defined ? labelEndOk(code) : labelEndNok(code)
}
/**
* After `]`, at `[`, but not at a full reference.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function referenceNotFull(code) {
return effects.attempt(
referenceCollapsedConstruct,
labelEndOk,
labelEndNok
)(code)
}
/**
* Done, we found something.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function labelEndOk(code) {
// Note: `markdown-rs` does a bunch of stuff here.
return ok(code)
}
/**
* Done, it’s nothing.
*
* There was an okay opening, but we didn’t match anything.
*
* ```markdown
* > | [a](b c
* ^
* > | [a][b c
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function labelEndNok(code) {
labelStart._balanced = true
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeResource(effects, ok, nok) {
return resourceStart
/**
* At a resource.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceStart(code) {
effects.enter('resource')
effects.enter('resourceMarker')
effects.consume(code)
effects.exit('resourceMarker')
return resourceBefore
}
/**
* In resource, after `(`, at optional whitespace.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceBefore(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceOpen)(code)
: resourceOpen(code)
}
/**
* In resource, after optional whitespace, at `)` or a destination.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceOpen(code) {
if (code === 41) {
return resourceEnd(code)
}
return factoryDestination(
effects,
resourceDestinationAfter,
resourceDestinationMissing,
'resourceDestination',
'resourceDestinationLiteral',
'resourceDestinationLiteralMarker',
'resourceDestinationRaw',
'resourceDestinationString',
32
)(code)
}
/**
* In resource, after destination, at optional whitespace.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceDestinationAfter(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceBetween)(code)
: resourceEnd(code)
}
/**
* At invalid destination.
*
* ```markdown
* > | [a](<<) b
* ^
* ```
*
* @type {State}
*/
function resourceDestinationMissing(code) {
return nok(code)
}
/**
* In resource, after destination and whitespace, at `(` or title.
*
* ```markdown
* > | [a](b ) c
* ^
* ```
*
* @type {State}
*/
function resourceBetween(code) {
if (code === 34 || code === 39 || code === 40) {
return factoryTitle(
effects,
resourceTitleAfter,
nok,
'resourceTitle',
'resourceTitleMarker',
'resourceTitleString'
)(code)
}
return resourceEnd(code)
}
/**
* In resource, after title, at optional whitespace.
*
* ```markdown
* > | [a](b "c") d
* ^
* ```
*
* @type {State}
*/
function resourceTitleAfter(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceEnd)(code)
: resourceEnd(code)
}
/**
* In resource, at `)`.
*
* ```markdown
* > | [a](b) d
* ^
* ```
*
* @type {State}
*/
function resourceEnd(code) {
if (code === 41) {
effects.enter('resourceMarker')
effects.consume(code)
effects.exit('resourceMarker')
effects.exit('resource')
return ok
}
return nok(code)
}
}
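// Illustration of the resource grammar handled above (a sketch): `[a](b)`,
// `[a](b "c")`, and `[a]()` all succeed, whereas an invalid destination such
// as `[a](<<)` or a missing closing `)` makes the whole attempt fail with
// `nok`.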
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeReferenceFull(effects, ok, nok) {
const self = this
return referenceFull
/**
* In a reference (full), at the `[`.
*
* ```markdown
* > | [a][b] d
* ^
* ```
*
* @type {State}
*/
function referenceFull(code) {
return factoryLabel.call(
self,
effects,
referenceFullAfter,
referenceFullMissing,
'reference',
'referenceMarker',
'referenceString'
)(code)
}
/**
* In a reference (full), after `]`.
*
* ```markdown
* > | [a][b] d
* ^
* ```
*
* @type {State}
*/
function referenceFullAfter(code) {
return self.parser.defined.includes(
normalizeIdentifier(
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
)
)
? ok(code)
: nok(code)
}
/**
* In reference (full) that was missing.
*
* ```markdown
* > | [a][b d
* ^
* ```
*
* @type {State}
*/
function referenceFullMissing(code) {
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeReferenceCollapsed(effects, ok, nok) {
return referenceCollapsedStart
/**
* In reference (collapsed), at `[`.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] d
* ^
* ```
*
* @type {State}
*/
function referenceCollapsedStart(code) {
// We only attempt a collapsed label if there’s a `[`.
effects.enter('reference')
effects.enter('referenceMarker')
effects.consume(code)
effects.exit('referenceMarker')
return referenceCollapsedOpen
}
/**
* In reference (collapsed), at `]`.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] d
* ^
* ```
*
* @type {State}
*/
function referenceCollapsedOpen(code) {
if (code === 93) {
effects.enter('referenceMarker')
effects.consume(code)
effects.exit('referenceMarker')
effects.exit('reference')
return ok
}
return nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/label-start-image.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const labelStartImage = {
name: 'labelStartImage',
tokenize: tokenizeLabelStartImage,
resolveAll: labelEnd.resolveAll
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeLabelStartImage(effects, ok, nok) {
const self = this
return start
/**
* Start of label (image) start.
*
* ```markdown
* > | a ![b] c
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('labelImage')
effects.enter('labelImageMarker')
effects.consume(code)
effects.exit('labelImageMarker')
return open
}
/**
* After `!`, at `[`.
*
* ```markdown
* > | a ![b] c
* ^
* ```
*
* @type {State}
*/
function open(code) {
if (code === 91) {
effects.enter('labelMarker')
effects.consume(code)
effects.exit('labelMarker')
effects.exit('labelImage')
return after
}
return nok(code)
}
/**
* After `![`.
*
* ```markdown
* > | a ![b] c
* ^
* ```
*
* This is needed because, when GFM footnotes are enabled, images never
* form when started with a `^`.
* Instead, links form:
*
* ```markdown
* ![^a](b)
*
* ![^a][b]
*
* [b]: c
* ```
*
* ```html
* <p>!<a href="b">^a</a></p>
* <p>!<a href="c">^a</a></p>
* ```
*
* @type {State}
*/
function after(code) {
// To do: use a new field to do this, this is still needed for
// `micromark-extension-gfm-footnote`, but the `label-start-link`
// behavior isn’t.
// Hidden footnotes hook.
/* c8 ignore next 3 */
return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs
? nok(code)
: ok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-util-classify-character/index.js
/**
* @typedef {import('micromark-util-types').Code} Code
*/
/**
* Classify whether a code represents whitespace, punctuation, or something
* else.
*
* Used for attention (emphasis, strong), whose sequences can open or close
* based on the class of surrounding characters.
*
* > 👉 **Note**: eof (`null`) is seen as whitespace.
*
* @param {Code} code
* Code.
* @returns {typeof constants.characterGroupWhitespace | typeof constants.characterGroupPunctuation | undefined}
* Group.
*/
function classifyCharacter(code) {
if (
code === null ||
markdownLineEndingOrSpace(code) ||
unicodeWhitespace(code)
) {
return 1
}
if (unicodePunctuation(code)) {
return 2
}
}
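// Example classifications (illustrative): `classifyCharacter(null)` and
// `classifyCharacter(32)` return 1 (whitespace, with eof counted as
// whitespace), `classifyCharacter(42)` returns 2 (`*` is punctuation), and
// `classifyCharacter(97)` returns undefined (`a` is something else).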
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/attention.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Event} Event
* @typedef {import('micromark-util-types').Point} Point
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const attention = {
name: 'attention',
tokenize: tokenizeAttention,
resolveAll: resolveAllAttention
}
/**
* Take all events and resolve attention to emphasis or strong.
*
* @type {Resolver}
*/
function resolveAllAttention(events, context) {
let index = -1
/** @type {number} */
let open
/** @type {Token} */
let group
/** @type {Token} */
let text
/** @type {Token} */
let openingSequence
/** @type {Token} */
let closingSequence
/** @type {number} */
let use
/** @type {Array<Event>} */
let nextEvents
/** @type {number} */
let offset
// Walk through all events.
//
// Note: performance of this is fine on an mb of normal markdown, but it’s
// a bottleneck for malicious stuff.
while (++index < events.length) {
// Find a token that can close.
if (
events[index][0] === 'enter' &&
events[index][1].type === 'attentionSequence' &&
events[index][1]._close
) {
open = index
// Now walk back to find an opener.
while (open--) {
// Find a token that can open the closer.
if (
events[open][0] === 'exit' &&
events[open][1].type === 'attentionSequence' &&
events[open][1]._open &&
// If the markers are the same:
context.sliceSerialize(events[open][1]).charCodeAt(0) ===
context.sliceSerialize(events[index][1]).charCodeAt(0)
) {
// If the opening can close or the closing can open,
// and the close size *is not* a multiple of three,
// but the sum of the opening and closing size *is* multiple of three,
// then don’t match.
if (
(events[open][1]._close || events[index][1]._open) &&
(events[index][1].end.offset - events[index][1].start.offset) % 3 &&
!(
(events[open][1].end.offset -
events[open][1].start.offset +
events[index][1].end.offset -
events[index][1].start.offset) %
3
)
) {
continue
}
// Number of markers to use from the sequence.
use =
events[open][1].end.offset - events[open][1].start.offset > 1 &&
events[index][1].end.offset - events[index][1].start.offset > 1
? 2
: 1
const start = Object.assign({}, events[open][1].end)
const end = Object.assign({}, events[index][1].start)
movePoint(start, -use)
movePoint(end, use)
openingSequence = {
type: use > 1 ? 'strongSequence' : 'emphasisSequence',
start,
end: Object.assign({}, events[open][1].end)
}
closingSequence = {
type: use > 1 ? 'strongSequence' : 'emphasisSequence',
start: Object.assign({}, events[index][1].start),
end
}
text = {
type: use > 1 ? 'strongText' : 'emphasisText',
start: Object.assign({}, events[open][1].end),
end: Object.assign({}, events[index][1].start)
}
group = {
type: use > 1 ? 'strong' : 'emphasis',
start: Object.assign({}, openingSequence.start),
end: Object.assign({}, closingSequence.end)
}
events[open][1].end = Object.assign({}, openingSequence.start)
events[index][1].start = Object.assign({}, closingSequence.end)
nextEvents = []
// If there are more markers in the opening, add them before.
if (events[open][1].end.offset - events[open][1].start.offset) {
nextEvents = push(nextEvents, [
['enter', events[open][1], context],
['exit', events[open][1], context]
])
}
// Opening.
nextEvents = push(nextEvents, [
['enter', group, context],
['enter', openingSequence, context],
['exit', openingSequence, context],
['enter', text, context]
])
// Always populated by defaults.
// Between.
nextEvents = push(
nextEvents,
resolveAll(
context.parser.constructs.insideSpan.null,
events.slice(open + 1, index),
context
)
)
// Closing.
nextEvents = push(nextEvents, [
['exit', text, context],
['enter', closingSequence, context],
['exit', closingSequence, context],
['exit', group, context]
])
// If there are more markers in the closing, add them after.
if (events[index][1].end.offset - events[index][1].start.offset) {
offset = 2
nextEvents = push(nextEvents, [
['enter', events[index][1], context],
['exit', events[index][1], context]
])
} else {
offset = 0
}
splice(events, open - 1, index - open + 3, nextEvents)
index = open + nextEvents.length - offset - 2
break
}
}
}
}
// Remove remaining sequences.
index = -1
while (++index < events.length) {
if (events[index][1].type === 'attentionSequence') {
events[index][1].type = 'data'
}
}
return events
}
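// Illustrative outcome: `**a**` resolves to a `strong` group wrapping
// `strongSequence` and `strongText` events, `*a*` resolves to `emphasis`, and
// any `attentionSequence` that never finds a matching opener or closer (such
// as the `*` in `a * b`) is downgraded to plain `data` by the final loop.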
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeAttention(effects, ok) {
const attentionMarkers = this.parser.constructs.attentionMarkers.null
const previous = this.previous
const before = classifyCharacter(previous)
/** @type {NonNullable<Code>} */
let marker
return start
/**
* Before a sequence.
*
* ```markdown
* > | **
* ^
* ```
*
* @type {State}
*/
function start(code) {
marker = code
effects.enter('attentionSequence')
return inside(code)
}
/**
* In a sequence.
*
* ```markdown
* > | **
* ^^
* ```
*
* @type {State}
*/
function inside(code) {
if (code === marker) {
effects.consume(code)
return inside
}
const token = effects.exit('attentionSequence')
// To do: next major: move this to resolver, just like `markdown-rs`.
const after = classifyCharacter(code)
// Always populated by defaults.
const open =
!after || (after === 2 && before) || attentionMarkers.includes(code)
const close =
!before || (before === 2 && after) || attentionMarkers.includes(previous)
token._open = Boolean(marker === 42 ? open : open && (before || !close))
token._close = Boolean(marker === 42 ? close : close && (after || !open))
return ok(code)
}
}
/**
* Move a point a bit.
*
* Note: `move` only works inside lines! It’s not possible to move past other
* chunks (replacement characters, tabs, or line endings).
*
* @param {Point} point
* @param {number} offset
* @returns {void}
*/
function movePoint(point, offset) {
point.column += offset
point.offset += offset
point._bufferIndex += offset
}
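// Illustrative note (not from the original micromark source): `movePoint` mutates its
// argument in place. For a hypothetical point `{line: 1, column: 5, offset: 4, _bufferIndex: 4}`,
// `movePoint(point, -2)` leaves `{line: 1, column: 3, offset: 2, _bufferIndex: 2}`;
// `line` is never touched, which is why the helper only works within a single line.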
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/autolink.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const autolink = {
name: 'autolink',
tokenize: tokenizeAutolink
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeAutolink(effects, ok, nok) {
let size = 0
return start
/**
* Start of an autolink.
*
* ```markdown
* > | a<https://example.com>b
*      ^
* > | a<user@example.com>b
*      ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('autolink')
effects.enter('autolinkMarker')
effects.consume(code)
effects.exit('autolinkMarker')
effects.enter('autolinkProtocol')
return open
}
/**
* After `<`, at protocol or atext.
*
* ```markdown
* > | a<https://example.com>b
*       ^
* > | a<user@example.com>b
*       ^
* ```
*
* @type {State}
*/
function open(code) {
if (asciiAlpha(code)) {
effects.consume(code)
return schemeOrEmailAtext
}
return emailAtext(code)
}
/**
* At second byte of protocol or atext.
*
* ```markdown
* > | a<https://example.com>b
*        ^
* > | a<user@example.com>b
*        ^
* ```
*
* @type {State}
*/
function schemeOrEmailAtext(code) {
// ASCII alphanumeric and `+`, `-`, and `.`.
if (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) {
// Count the previous alphabetical from `open` too.
size = 1
return schemeInsideOrEmailAtext(code)
}
return emailAtext(code)
}
/**
* In ambiguous protocol or atext.
*
* ```markdown
* > | a<https://example.com>b
*         ^
* > | a<user@example.com>b
*         ^
* ```
*
* @type {State}
*/
function schemeInsideOrEmailAtext(code) {
if (code === 58) {
effects.consume(code)
size = 0
return urlInside
}
// ASCII alphanumeric and `+`, `-`, and `.`.
if (
(code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&
size++ < 32
) {
effects.consume(code)
return schemeInsideOrEmailAtext
}
size = 0
return emailAtext(code)
}
/**
* After protocol, in URL.
*
* ```markdown
* > | a<https://example.com>b
*             ^
* ```
*
* @type {State}
*/
function urlInside(code) {
if (code === 62) {
effects.exit('autolinkProtocol')
effects.enter('autolinkMarker')
effects.consume(code)
effects.exit('autolinkMarker')
effects.exit('autolink')
return ok
}
// ASCII control, space, or `<`.
if (code === null || code === 32 || code === 60 || asciiControl(code)) {
return nok(code)
}
effects.consume(code)
return urlInside
}
/**
* In email atext.
*
* ```markdown
* > | a<user.name@example.com>b
*              ^
* ```
*
* @type {State}
*/
function emailAtext(code) {
if (code === 64) {
effects.consume(code)
return emailAtSignOrDot
}
if (asciiAtext(code)) {
effects.consume(code)
return emailAtext
}
return nok(code)
}
/**
* In label, after at-sign or dot.
*
* ```markdown
* > | a<user.name@example.com>b
*                 ^       ^
* ```
*
* @type {State}
*/
function emailAtSignOrDot(code) {
return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)
}
/**
* In label, where `.` and `>` are allowed.
*
* ```markdown
* > | a<user.name@example.com>b
*                    ^
* ```
*
* @type {State}
*/
function emailLabel(code) {
if (code === 46) {
effects.consume(code)
size = 0
return emailAtSignOrDot
}
if (code === 62) {
// Exit, then change the token type.
effects.exit('autolinkProtocol').type = 'autolinkEmail'
effects.enter('autolinkMarker')
effects.consume(code)
effects.exit('autolinkMarker')
effects.exit('autolink')
return ok
}
return emailValue(code)
}
/**
* In label, where `.` and `>` are *not* allowed.
*
* Though, this is also used in `emailLabel` to parse other values.
*
* ```markdown
* > | a<user.name@example.com>b
*                     ^
* ```
*
* @type {State}
*/
function emailValue(code) {
// ASCII alphanumeric or `-`.
if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {
const next = code === 45 ? emailValue : emailLabel
effects.consume(code)
return next
}
return nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/html-text.js
/**
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const htmlText = {
name: 'htmlText',
tokenize: tokenizeHtmlText
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeHtmlText(effects, ok, nok) {
const self = this
/** @type {NonNullable<Code> | undefined} */
let marker
/** @type {number} */
let index
/** @type {State} */
let returnState
return start
/**
* Start of HTML (text).
*
* ```markdown
* > | a <b> c
*       ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('htmlText')
effects.enter('htmlTextData')
effects.consume(code)
return open
}
/**
* After `<`, at tag name or other stuff.
*
* ```markdown
* > | a <b> c
*        ^
* > | a <!doctype> c
*        ^
* > | a <!--b--> c
*        ^
* ```
*
* @type {State}
*/
function open(code) {
if (code === 33) {
effects.consume(code)
return declarationOpen
}
if (code === 47) {
effects.consume(code)
return tagCloseStart
}
if (code === 63) {
effects.consume(code)
return instruction
}
// ASCII alphabetical.
if (asciiAlpha(code)) {
effects.consume(code)
return tagOpen
}
return nok(code)
}
/**
* After `<!`, at declaration, comment, or CDATA.
*
* ```markdown
* > | a <!doctype> c
*         ^
* > | a <!--b--> c
*         ^
* > | a <![CDATA[>&<]]> c
*         ^
* ```
*
* @type {State}
*/
function declarationOpen(code) {
if (code === 45) {
effects.consume(code)
return commentOpenInside
}
if (code === 91) {
effects.consume(code)
index = 0
return cdataOpenInside
}
if (asciiAlpha(code)) {
effects.consume(code)
return declaration
}
return nok(code)
}
/**
* In a comment, after `<!-`, at another `-`.
*
* ```markdown
* > | a <!--b--> c
*          ^
* ```
*
* @type {State}
*/
function commentOpenInside(code) {
if (code === 45) {
effects.consume(code)
return commentEnd
}
return nok(code)
}
/**
* In comment.
*
* ```markdown
* > | a <!--b--> c
*           ^
* ```
*
* @type {State}
*/
function comment(code) {
if (code === null) {
return nok(code)
}
if (code === 45) {
effects.consume(code)
return commentClose
}
if (markdownLineEnding(code)) {
returnState = comment
return lineEndingBefore(code)
}
effects.consume(code)
return comment
}
/**
* In comment, after `-`.
*
* ```markdown
* > | a <!--b--> c
*             ^
* ```
*
* @type {State}
*/
function commentClose(code) {
if (code === 45) {
effects.consume(code)
return commentEnd
}
return comment(code)
}
/**
* In comment, after `--`.
*
* ```markdown
* > | a <!--b--> c
*              ^
* ```
*
* @type {State}
*/
function commentEnd(code) {
return code === 62
? end(code)
: code === 45
? commentClose(code)
: comment(code)
}
/**
* After `<![`, in CDATA, expecting `CDATA[`.
*
* ```markdown
* > | a <![CDATA[>&<]]> b
*          ^^^^^^
* ```
*
* @type {State}
*/
function cdataOpenInside(code) {
const value = 'CDATA['
if (code === value.charCodeAt(index++)) {
effects.consume(code)
return index === value.length ? cdata : cdataOpenInside
}
return nok(code)
}
/**
* In CDATA.
*
* ```markdown
* > | a <![CDATA[>&<]]> b
*                ^^^
* ```
*
* @type {State}
*/
function cdata(code) {
if (code === null) {
return nok(code)
}
if (code === 93) {
effects.consume(code)
return cdataClose
}
if (markdownLineEnding(code)) {
returnState = cdata
return lineEndingBefore(code)
}
effects.consume(code)
return cdata
}
/**
* In CDATA, after `]`, at another `]`.
*
* ```markdown
* > | a <![CDATA[>&<]]> b
*                    ^
* ```
*
* @type {State}
*/
function cdataClose(code) {
if (code === 93) {
effects.consume(code)
return cdataEnd
}
return cdata(code)
}
/**
* In CDATA, after `]]`, at `>`.
*
* ```markdown
* > | a <![CDATA[>&<]]> b
*                     ^
* ```
*
* @type {State}
*/
function cdataEnd(code) {
if (code === 62) {
return end(code)
}
if (code === 93) {
effects.consume(code)
return cdataEnd
}
return cdata(code)
}
/**
* In declaration.
*
* ```markdown
* > | a <!b> c
*          ^
* ```
*
* @type {State}
*/
function declaration(code) {
if (code === null || code === 62) {
return end(code)
}
if (markdownLineEnding(code)) {
returnState = declaration
return lineEndingBefore(code)
}
effects.consume(code)
return declaration
}
/**
* In instruction.
*
* ```markdown
* > | a <?b?> c
*          ^
* ```
*
* @type {State}
*/
function instruction(code) {
if (code === null) {
return nok(code)
}
if (code === 63) {
effects.consume(code)
return instructionClose
}
if (markdownLineEnding(code)) {
returnState = instruction
return lineEndingBefore(code)
}
effects.consume(code)
return instruction
}
/**
* In instruction, after `?`, at `>`.
*
* ```markdown
* > | a <?b?> c
*            ^
* ```
*
* @type {State}
*/
function instructionClose(code) {
return code === 62 ? end(code) : instruction(code)
}
/**
* After `</`, in closing tag, at tag name.
*
* ```markdown
* > | a </b> c
*         ^
* ```
*
* @type {State}
*/
function tagCloseStart(code) {
// ASCII alphabetical.
if (asciiAlpha(code)) {
effects.consume(code)
return tagClose
}
return nok(code)
}
/**
* After `</x`, in a tag name.
*
* ```markdown
* > | a </b> c
*          ^
* ```
*
* @type {State}
*/
function tagClose(code) {
// ASCII alphanumerical and `-`.
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code)
return tagClose
}
return tagCloseBetween(code)
}
/**
* In closing tag, after tag name.
*
* ```markdown
* > | a </b> c
*           ^
* ```
*
* @type {State}
*/
function tagCloseBetween(code) {
if (markdownLineEnding(code)) {
returnState = tagCloseBetween
return lineEndingBefore(code)
}
if (markdownSpace(code)) {
effects.consume(code)
return tagCloseBetween
}
return end(code)
}
/**
* After `<x`, in opening tag name.
*
* ```markdown
* > | a <b> c
*        ^
* ```
*
* @type {State}
*/
function tagOpen(code) {
// ASCII alphanumerical and `-`.
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code)
return tagOpen
}
if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code)
}
return nok(code)
}
/**
* In opening tag, after tag name.
*
* ```markdown
* > | a <b> c
*          ^
* ```
*
* @type {State}
*/
function tagOpenBetween(code) {
if (code === 47) {
effects.consume(code)
return end
}
// ASCII alphabetical and `:` and `_`.
if (code === 58 || code === 95 || asciiAlpha(code)) {
effects.consume(code)
return tagOpenAttributeName
}
if (markdownLineEnding(code)) {
returnState = tagOpenBetween
return lineEndingBefore(code)
}
if (markdownSpace(code)) {
effects.consume(code)
return tagOpenBetween
}
return end(code)
}
/**
* In attribute name.
*
* ```markdown
* > | a <b c> d
*          ^
* ```
*
* @type {State}
*/
function tagOpenAttributeName(code) {
// ASCII alphabetical and `-`, `.`, `:`, and `_`.
if (
code === 45 ||
code === 46 ||
code === 58 ||
code === 95 ||
asciiAlphanumeric(code)
) {
effects.consume(code)
return tagOpenAttributeName
}
return tagOpenAttributeNameAfter(code)
}
/**
* After attribute name, before initializer, the end of the tag, or
* whitespace.
*
* ```markdown
* > | a <b c> d
*           ^
* ```
*
* @type {State}
*/
function tagOpenAttributeNameAfter(code) {
if (code === 61) {
effects.consume(code)
return tagOpenAttributeValueBefore
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeNameAfter
return lineEndingBefore(code)
}
if (markdownSpace(code)) {
effects.consume(code)
return tagOpenAttributeNameAfter
}
return tagOpenBetween(code)
}
/**
* Before unquoted, double quoted, or single quoted attribute value, allowing
* whitespace.
*
* ```markdown
* > | a <b c=d> e
*            ^
* ```
*
* @type {State}
*/
function tagOpenAttributeValueBefore(code) {
if (
code === null ||
code === 60 ||
code === 61 ||
code === 62 ||
code === 96
) {
return nok(code)
}
if (code === 34 || code === 39) {
effects.consume(code)
marker = code
return tagOpenAttributeValueQuoted
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeValueBefore
return lineEndingBefore(code)
}
if (markdownSpace(code)) {
effects.consume(code)
return tagOpenAttributeValueBefore
}
effects.consume(code)
return tagOpenAttributeValueUnquoted
}
/**
* In double or single quoted attribute value.
*
* ```markdown
* > | a <b c="d"> e
*              ^
* ```
*
* @type {State}
*/
function tagOpenAttributeValueQuoted(code) {
if (code === marker) {
effects.consume(code)
marker = undefined
return tagOpenAttributeValueQuotedAfter
}
if (code === null) {
return nok(code)
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeValueQuoted
return lineEndingBefore(code)
}
effects.consume(code)
return tagOpenAttributeValueQuoted
}
/**
* In unquoted attribute value.
*
* ```markdown
* > | a <b c=d> e
*             ^
* ```
*
* @type {State}
*/
function tagOpenAttributeValueUnquoted(code) {
if (
code === null ||
code === 34 ||
code === 39 ||
code === 60 ||
code === 61 ||
code === 96
) {
return nok(code)
}
if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code)
}
effects.consume(code)
return tagOpenAttributeValueUnquoted
}
/**
* After double or single quoted attribute value, before whitespace or the end
* of the tag.
*
* ```markdown
* > | a <b c="d"> e
*                 ^
* ```
*
* @type {State}
*/
function tagOpenAttributeValueQuotedAfter(code) {
if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code)
}
return nok(code)
}
/**
* In certain circumstances of a tag where only an `>` is allowed.
*
* ```markdown
* > | a <b c="d"> e
*               ^
* ```
*
* @type {State}
*/
function end(code) {
if (code === 62) {
effects.consume(code)
effects.exit('htmlTextData')
effects.exit('htmlText')
return ok
}
return nok(code)
}
/**
* At eol.
*
* > 👉 **Note**: we can’t have blank lines in text, so no need to worry about
* > empty tokens.
*
* ```markdown
* > | a <!--a
*            ^
*   | b-->
* ```
*
* @type {State}
*/
function lineEndingBefore(code) {
effects.exit('htmlTextData')
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return lineEndingAfter
}
/**
* After eol, at optional whitespace.
*
* > 👉 **Note**: we can’t have blank lines in text, so no need to worry about
* > empty tokens.
*
* ```markdown
*   | a <!--a
* > | b-->
*     ^
* ```
*
* @type {State}
*/
function lineEndingAfter(code) {
// Always populated by defaults.
return markdownSpace(code)
? factorySpace(
effects,
lineEndingAfterPrefix,
'linePrefix',
self.parser.constructs.disable.null.includes('codeIndented')
? undefined
: 4
)(code)
: lineEndingAfterPrefix(code)
}
/**
* After eol, after optional whitespace.
*
* > 👉 **Note**: we can’t have blank lines in text, so no need to worry about
* > empty tokens.
*
* ```markdown
*   | a <!--a
* > | b-->
*      ^
* ```
*
* @type {State}
*/
function lineEndingAfterPrefix(code) {
effects.enter('htmlTextData')
return returnState(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/label-start-link.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const labelStartLink = {
name: 'labelStartLink',
tokenize: tokenizeLabelStartLink,
resolveAll: labelEnd.resolveAll
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeLabelStartLink(effects, ok, nok) {
const self = this
return start
/**
* Start of label (link) start.
*
* ```markdown
* > | a [b] c
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('labelLink')
effects.enter('labelMarker')
effects.consume(code)
effects.exit('labelMarker')
effects.exit('labelLink')
return after
}
/** @type {State} */
function after(code) {
// To do: this isn’t needed in `micromark-extension-gfm-footnote`,
// remove.
// Hidden footnotes hook.
/* c8 ignore next 3 */
return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs
? nok(code)
: ok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/hard-break-escape.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const hardBreakEscape = {
name: 'hardBreakEscape',
tokenize: tokenizeHardBreakEscape
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeHardBreakEscape(effects, ok, nok) {
return start
/**
* Start of a hard break (escape).
*
* ```markdown
* > | a\
* ^
* | b
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('hardBreakEscape')
effects.consume(code)
return after
}
/**
* After `\`, at eol.
*
* ```markdown
* > | a\
* ^
* | b
* ```
*
* @type {State}
*/
function after(code) {
if (markdownLineEnding(code)) {
effects.exit('hardBreakEscape')
return ok(code)
}
return nok(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-core-commonmark/lib/code-text.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Previous} Previous
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
/** @type {Construct} */
const codeText = {
name: 'codeText',
tokenize: tokenizeCodeText,
resolve: resolveCodeText,
previous
}
// To do: next major: don’t resolve, like `markdown-rs`.
/** @type {Resolver} */
function resolveCodeText(events) {
let tailExitIndex = events.length - 4
let headEnterIndex = 3
/** @type {number} */
let index
/** @type {number | undefined} */
let enter
// If we start and end with an EOL or a space.
if (
(events[headEnterIndex][1].type === 'lineEnding' ||
events[headEnterIndex][1].type === 'space') &&
(events[tailExitIndex][1].type === 'lineEnding' ||
events[tailExitIndex][1].type === 'space')
) {
index = headEnterIndex
// And we have data.
while (++index < tailExitIndex) {
if (events[index][1].type === 'codeTextData') {
// Then we have padding.
events[headEnterIndex][1].type = 'codeTextPadding'
events[tailExitIndex][1].type = 'codeTextPadding'
headEnterIndex += 2
tailExitIndex -= 2
break
}
}
}
// Merge adjacent spaces and data.
index = headEnterIndex - 1
tailExitIndex++
while (++index <= tailExitIndex) {
if (enter === undefined) {
if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {
enter = index
}
} else if (
index === tailExitIndex ||
events[index][1].type === 'lineEnding'
) {
events[enter][1].type = 'codeTextData'
if (index !== enter + 2) {
events[enter][1].end = events[index - 1][1].end
events.splice(enter + 2, index - enter - 2)
tailExitIndex -= index - enter - 2
index = enter + 2
}
enter = undefined
}
}
return events
}
/**
* @this {TokenizeContext}
* @type {Previous}
*/
function previous(code) {
// If there is a previous code, there will always be a tail.
return (
code !== 96 ||
this.events[this.events.length - 1][1].type === 'characterEscape'
)
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeCodeText(effects, ok, nok) {
const self = this
let sizeOpen = 0
/** @type {number} */
let size
/** @type {Token} */
let token
return start
/**
* Start of code (text).
*
* ```markdown
* > | `a`
* ^
* > | \`a`
* ^
* ```
*
* @type {State}
*/
function start(code) {
effects.enter('codeText')
effects.enter('codeTextSequence')
return sequenceOpen(code)
}
/**
* In opening sequence.
*
* ```markdown
* > | `a`
* ^
* ```
*
* @type {State}
*/
function sequenceOpen(code) {
if (code === 96) {
effects.consume(code)
sizeOpen++
return sequenceOpen
}
effects.exit('codeTextSequence')
return between(code)
}
/**
* Between something and something else.
*
* ```markdown
* > | `a`
* ^^
* ```
*
* @type {State}
*/
function between(code) {
// EOF.
if (code === null) {
return nok(code)
}
// To do: next major: don’t do spaces in resolve, but when compiling,
// like `markdown-rs`.
// Tabs don’t work, and virtual spaces don’t make sense.
if (code === 32) {
effects.enter('space')
effects.consume(code)
effects.exit('space')
return between
}
// Closing fence? Could also be data.
if (code === 96) {
token = effects.enter('codeTextSequence')
size = 0
return sequenceClose(code)
}
if (markdownLineEnding(code)) {
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return between
}
// Data.
effects.enter('codeTextData')
return data(code)
}
/**
* In data.
*
* ```markdown
* > | `a`
* ^
* ```
*
* @type {State}
*/
function data(code) {
if (
code === null ||
code === 32 ||
code === 96 ||
markdownLineEnding(code)
) {
effects.exit('codeTextData')
return between(code)
}
effects.consume(code)
return data
}
/**
* In closing sequence.
*
* ```markdown
* > | `a`
* ^
* ```
*
* @type {State}
*/
function sequenceClose(code) {
// More.
if (code === 96) {
effects.consume(code)
size++
return sequenceClose
}
// Done!
if (size === sizeOpen) {
effects.exit('codeTextSequence')
effects.exit('codeText')
return ok(code)
}
// More or less accents: mark as data.
token.type = 'codeTextData'
return data(code)
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark/lib/constructs.js
/**
* @typedef {import('micromark-util-types').Extension} Extension
*/
/** @satisfies {Extension['document']} */
const constructs_document = {
[42]: list,
[43]: list,
[45]: list,
[48]: list,
[49]: list,
[50]: list,
[51]: list,
[52]: list,
[53]: list,
[54]: list,
[55]: list,
[56]: list,
[57]: list,
[62]: blockQuote
}
/** @satisfies {Extension['contentInitial']} */
const contentInitial = {
[91]: definition
}
/** @satisfies {Extension['flowInitial']} */
const flowInitial = {
[-2]: codeIndented,
[-1]: codeIndented,
[32]: codeIndented
}
/** @satisfies {Extension['flow']} */
const constructs_flow = {
[35]: headingAtx,
[42]: thematicBreak,
[45]: [setextUnderline, thematicBreak],
[60]: htmlFlow,
[61]: setextUnderline,
[95]: thematicBreak,
[96]: codeFenced,
[126]: codeFenced
}
/** @satisfies {Extension['string']} */
const constructs_string = {
[38]: characterReference,
[92]: characterEscape
}
/** @satisfies {Extension['text']} */
const constructs_text = {
[-5]: lineEnding,
[-4]: lineEnding,
[-3]: lineEnding,
[33]: labelStartImage,
[38]: characterReference,
[42]: attention,
[60]: [autolink, htmlText],
[91]: labelStartLink,
[92]: [hardBreakEscape, characterEscape],
[93]: labelEnd,
[95]: attention,
[96]: codeText
}
/** @satisfies {Extension['insideSpan']} */
const insideSpan = {
null: [attention, resolver]
}
/** @satisfies {Extension['attentionMarkers']} */
const attentionMarkers = {
null: [42, 95]
}
/** @satisfies {Extension['disable']} */
const disable = {
null: []
}
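// Illustrative note (not from the original micromark source): the numeric keys in the
// construct maps above are character codes, e.g. 35 is `#` (ATX headings), 42 is `*`,
// 45 is `-`, 60 is `<`, and 96 is a backtick; the negative values are micromark's
// virtual codes for tabs and line endings produced by the preprocessor. A construct is
// attempted whenever the current code matches its key.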
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark/lib/parse.js
/**
* @typedef {import('micromark-util-types').Create} Create
* @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').ParseContext} ParseContext
* @typedef {import('micromark-util-types').ParseOptions} ParseOptions
*/
/**
* @param {ParseOptions | null | undefined} [options]
* @returns {ParseContext}
*/
function parse(options) {
const settings = options || {}
const constructs =
/** @type {FullNormalizedExtension} */
combineExtensions([constructs_namespaceObject, ...(settings.extensions || [])])
/** @type {ParseContext} */
const parser = {
defined: [],
lazy: {},
constructs,
content: create(content),
document: create(document_document),
flow: create(flow),
string: create(string),
text: create(text_text)
}
return parser
/**
* @param {InitialConstruct} initial
*/
function create(initial) {
return creator
/** @type {Create} */
function creator(from) {
return createTokenizer(parser, initial, from)
}
}
}
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark/lib/preprocess.js
/**
* @typedef {import('micromark-util-types').Chunk} Chunk
* @typedef {import('micromark-util-types').Code} Code
* @typedef {import('micromark-util-types').Encoding} Encoding
* @typedef {import('micromark-util-types').Value} Value
*/
/**
* @callback Preprocessor
* @param {Value} value
* @param {Encoding | null | undefined} [encoding]
* @param {boolean | null | undefined} [end=false]
* @returns {Array<Chunk>}
*/
const search = /[\0\t\n\r]/g
/**
* @returns {Preprocessor}
*/
function preprocess() {
let column = 1
let buffer = ''
/** @type {boolean | undefined} */
let start = true
/** @type {boolean | undefined} */
let atCarriageReturn
return preprocessor
/** @type {Preprocessor} */
function preprocessor(value, encoding, end) {
/** @type {Array<Chunk>} */
const chunks = []
/** @type {RegExpMatchArray | null} */
let match
/** @type {number} */
let next
/** @type {number} */
let startPosition
/** @type {number} */
let endPosition
/** @type {Code} */
let code
// @ts-expect-error `Buffer` does allow an encoding.
value = buffer + value.toString(encoding)
startPosition = 0
buffer = ''
if (start) {
// To do: `markdown-rs` actually parses BOMs (byte order mark).
if (value.charCodeAt(0) === 65279) {
startPosition++
}
start = undefined
}
while (startPosition < value.length) {
search.lastIndex = startPosition
match = search.exec(value)
endPosition =
match && match.index !== undefined ? match.index : value.length
code = value.charCodeAt(endPosition)
if (!match) {
buffer = value.slice(startPosition)
break
}
if (code === 10 && startPosition === endPosition && atCarriageReturn) {
chunks.push(-3)
atCarriageReturn = undefined
} else {
if (atCarriageReturn) {
chunks.push(-5)
atCarriageReturn = undefined
}
if (startPosition < endPosition) {
chunks.push(value.slice(startPosition, endPosition))
column += endPosition - startPosition
}
switch (code) {
case 0: {
chunks.push(65533)
column++
break
}
case 9: {
next = Math.ceil(column / 4) * 4
chunks.push(-2)
while (column++ < next) chunks.push(-1)
break
}
case 10: {
chunks.push(-4)
column = 1
break
}
default: {
atCarriageReturn = true
column = 1
}
}
}
startPosition = endPosition + 1
}
if (end) {
if (atCarriageReturn) chunks.push(-5)
if (buffer) chunks.push(buffer)
chunks.push(null)
}
return chunks
}
}
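// Illustrative sketch (not from the original micromark source), assuming the virtual
// codes used above (-2 tab, -1 virtual space, -3 CRLF, null EOF):
//
//   preprocess()('a\tb\r\n', undefined, true)
//   // -> ['a', -2, -1, -1, 'b', -3, null]
//
// The tab expands to the next multiple-of-4 column, and the trailing CR + LF collapses
// into a single -3 chunk once the final `end` flag is passed.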
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark/lib/postprocess.js
/**
* @typedef {import('micromark-util-types').Event} Event
*/
/**
* @param {Array<Event>} events
* @returns {Array<Event>}
*/
function postprocess(events) {
while (!subtokenize(events)) {
// Empty
}
return events
}
;// CONCATENATED MODULE: ./node_modules/micromark-util-decode-numeric-character-reference/index.js
/**
* Turn the number (in string form as either hexa- or plain decimal) coming from
* a numeric character reference into a character.
*
* Sort of like `String.fromCharCode(Number.parseInt(value, base))`, but makes
* non-characters and control characters safe.
*
* @param {string} value
* Value to decode.
* @param {number} base
* Numeric base.
* @returns {string}
* Character.
*/
function decodeNumericCharacterReference(value, base) {
const code = Number.parseInt(value, base)
if (
// C0 except for HT, LF, FF, CR, space.
code < 9 ||
code === 11 ||
(code > 13 && code < 32) ||
// Control character (DEL) of C0, and C1 controls.
(code > 126 && code < 160) ||
// Lone high surrogates and low surrogates.
(code > 55295 && code < 57344) ||
// Noncharacters.
(code > 64975 && code < 65008) /* eslint-disable no-bitwise */ ||
(code & 65535) === 65535 ||
(code & 65535) === 65534 /* eslint-enable no-bitwise */ ||
// Out of range
code > 1114111
) {
return '\uFFFD'
}
return String.fromCharCode(code)
}
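// Illustrative sketch (not from the original source):
//   decodeNumericCharacterReference('41', 16) // -> 'A' (U+0041)
//   decodeNumericCharacterReference('9', 10)  // -> '\t' (an allowed control)
//   decodeNumericCharacterReference('0', 10)  // -> '\uFFFD' (unsafe, replaced)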
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/micromark-util-decode-string/index.js
const characterEscapeOrReference =
/\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi
/**
* Decode markdown strings (which occur in places such as fenced code info
* strings, destinations, labels, and titles).
*
* The “string” content type allows character escapes and -references.
* This decodes those.
*
* @param {string} value
* Value to decode.
* @returns {string}
* Decoded value.
*/
function decodeString(value) {
return value.replace(characterEscapeOrReference, decode)
}
/**
* @param {string} $0
* @param {string} $1
* @param {string} $2
* @returns {string}
*/
function decode($0, $1, $2) {
if ($1) {
// Escape.
return $1
}
// Reference.
const head = $2.charCodeAt(0)
if (head === 35) {
const head = $2.charCodeAt(1)
const hex = head === 120 || head === 88
return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10)
}
return decodeNamedCharacterReference($2) || $0
}
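// Illustrative sketch (not from the original source): `decodeString` resolves both
// character escapes and character references in "string" content, e.g.
//   decodeString('\\*a &amp; &#x41;') // -> '*a & A'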
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/unist-util-stringify-position/lib/index.js
/**
* @typedef {import('unist').Node} Node
* @typedef {import('unist').Point} Point
* @typedef {import('unist').Position} Position
*/
/**
* @typedef NodeLike
* @property {string} type
* @property {PositionLike | null | undefined} [position]
*
* @typedef PositionLike
* @property {PointLike | null | undefined} [start]
* @property {PointLike | null | undefined} [end]
*
* @typedef PointLike
* @property {number | null | undefined} [line]
* @property {number | null | undefined} [column]
* @property {number | null | undefined} [offset]
*/
/**
* Serialize the positional info of a point, position (start and end points),
* or node.
*
* @param {Node | NodeLike | Position | PositionLike | Point | PointLike | null | undefined} [value]
* Node, position, or point.
* @returns {string}
* Pretty printed positional info of a node (`string`).
*
* In the format of a range `ls:cs-le:ce` (when given `node` or `position`)
* or a point `l:c` (when given `point`), where `l` stands for line, `c` for
* column, `s` for `start`, and `e` for end.
* An empty string (`''`) is returned if the given value is neither `node`,
* `position`, nor `point`.
*/
function stringifyPosition(value) {
// Nothing.
if (!value || typeof value !== 'object') {
return ''
}
// Node.
if ('position' in value || 'type' in value) {
return position(value.position)
}
// Position.
if ('start' in value || 'end' in value) {
return position(value)
}
// Point.
if ('line' in value || 'column' in value) {
return point(value)
}
// ?
return ''
}
/**
* @param {Point | PointLike | null | undefined} point
* @returns {string}
*/
function point(point) {
return index(point && point.line) + ':' + index(point && point.column)
}
/**
* @param {Position | PositionLike | null | undefined} pos
* @returns {string}
*/
function position(pos) {
return point(pos && pos.start) + '-' + point(pos && pos.end)
}
/**
* @param {number | null | undefined} value
* @returns {number}
*/
function index(value) {
return value && typeof value === 'number' ? value : 1
}
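// Illustrative sketch (not from the original source):
//   stringifyPosition({line: 2, column: 3})                                     // -> '2:3'
//   stringifyPosition({start: {line: 1, column: 1}, end: {line: 1, column: 4}}) // -> '1:1-1:4'
//   stringifyPosition(null)                                                     // -> ''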
;// CONCATENATED MODULE: ./node_modules/mermaid/node_modules/mdast-util-from-markdown/lib/index.js
/**
* @typedef {import('micromark-util-types').Encoding} Encoding
* @typedef {import('micromark-util-types').Event} Event
* @typedef {import('micromark-util-types').ParseOptions} ParseOptions
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Value} Value
*
* @typedef {import('unist').Parent} UnistParent
* @typedef {import('unist').Point} Point
*
* @typedef {import('mdast').PhrasingContent} PhrasingContent
* @typedef {import('mdast').StaticPhrasingContent} StaticPhrasingContent
* @typedef {import('mdast').Content} Content
* @typedef {import('mdast').Break} Break
* @typedef {import('mdast').Blockquote} Blockquote
* @typedef {import('mdast').Code} Code
* @typedef {import('mdast').Definition} Definition
* @typedef {import('mdast').Emphasis} Emphasis
* @typedef {import('mdast').Heading} Heading
* @typedef {import('mdast').HTML} HTML
* @typedef {import('mdast').Image} Image
* @typedef {import('mdast').ImageReference} ImageReference
* @typedef {import('mdast').InlineCode} InlineCode
* @typedef {import('mdast').Link} Link
* @typedef {import('mdast').LinkReference} LinkReference
* @typedef {import('mdast').List} List
* @typedef {import('mdast').ListItem} ListItem
* @typedef {import('mdast').Paragraph} Paragraph
* @typedef {import('mdast').Root} Root
* @typedef {import('mdast').Strong} Strong
* @typedef {import('mdast').Text} Text
* @typedef {import('mdast').ThematicBreak} ThematicBreak
* @typedef {import('mdast').ReferenceType} ReferenceType
* @typedef {import('../index.js').CompileData} CompileData
*/
/**
* @typedef {Root | Content} Node
* @typedef {Extract<Node, UnistParent>} Parent
*
* @typedef {Omit<UnistParent, 'type' | 'children'> & {type: 'fragment', children: Array<PhrasingContent>}} Fragment
*/
/**
* @callback Transform
* Extra transform, to change the AST afterwards.
* @param {Root} tree
* Tree to transform.
* @returns {Root | undefined | null | void}
* New tree or nothing (in which case the current tree is used).
*
* @callback Handle
* Handle a token.
* @param {CompileContext} this
* Context.
* @param {Token} token
* Current token.
* @returns {void}
* Nothing.
*
* @typedef {Record<string, Handle>} Handles
* Token types mapping to handles
*
* @callback OnEnterError
* Handle the case where the `right` token is open, but it is closed (by the
* `left` token) or because we reached the end of the document.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token | undefined} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @callback OnExitError
* Handle the case where the `right` token is open but it is closed by
* exiting the `left` token.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @typedef {[Token, OnEnterError | undefined]} TokenTuple
* Open token on the stack, with an optional error handler for when
* that token isn’t closed properly.
*/
/**
* @typedef Config
* Configuration.
*
* We have our defaults, but extensions will add more.
* @property {Array<string>} canContainEols
* Token types where line endings are used.
* @property {Handles} enter
* Opening handles.
* @property {Handles} exit
* Closing handles.
* @property {Array<Transform>} transforms
* Tree transforms.
*
* @typedef {Partial<Config>} Extension
* Change how markdown tokens from micromark are turned into mdast.
*
* @typedef CompileContext
* mdast compiler context.
* @property {Array<Fragment | Node>} stack
* Stack of nodes.
* @property {Array<TokenTuple>} tokenStack
* Stack of tokens.
* @property {<Key extends keyof CompileData>(key: Key) => CompileData[Key]} getData
* Get data from the key/value store.
* @property {<Key extends keyof CompileData>(key: Key, value?: CompileData[Key]) => void} setData
* Set data into the key/value store.
* @property {(this: CompileContext) => void} buffer
* Capture some of the output data.
* @property {(this: CompileContext) => string} resume
* Stop capturing and access the output data.
* @property {<Kind extends Node>(this: CompileContext, node: Kind, token: Token, onError?: OnEnterError) => Kind} enter
* Enter a token.
* @property {(this: CompileContext, token: Token, onError?: OnExitError) => Node} exit
* Exit a token.
* @property {TokenizeContext['sliceSerialize']} sliceSerialize
* Get the string value of a token.
* @property {Config} config
* Configuration.
*
* @typedef FromMarkdownOptions
* Configuration for how to build mdast.
* @property {Array<Extension | Array<Extension>> | null | undefined} [mdastExtensions]
* Extensions for this utility to change how tokens are turned into a tree.
*
* @typedef {ParseOptions & FromMarkdownOptions} Options
* Configuration.
*/
// To do: micromark: create a registry of tokens?
// To do: next major: don’t return given `Node` from `enter`.
// To do: next major: remove setter/getter.
const lib_own = {}.hasOwnProperty
/**
* @param value
* Markdown to parse.
* @param encoding
* Character encoding for when `value` is `Buffer`.
* @param options
* Configuration.
* @returns
* mdast tree.
*/
const fromMarkdown =
/**
* @type {(
* ((value: Value, encoding: Encoding, options?: Options | null | undefined) => Root) &
* ((value: Value, options?: Options | null | undefined) => Root)
* )}
*/
/**
* @param {Value} value
* @param {Encoding | Options | null | undefined} [encoding]
* @param {Options | null | undefined} [options]
* @returns {Root}
*/
function (value, encoding, options) {
if (typeof encoding !== 'string') {
options = encoding
encoding = undefined
}
return compiler(options)(
postprocess(
parse(options).document().write(preprocess()(value, encoding, true))
)
)
}
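// Illustrative note (not from the original source): `fromMarkdown` chains the stages
// defined earlier (preprocess -> parse/write -> postprocess -> compile). A call such
// as `fromMarkdown('*hi*')` returns an mdast `root` whose single `paragraph` child
// contains an `emphasis` node wrapping a `text` node with value 'hi', each carrying
// `position` info derived from the token boundaries.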
/**
* Note this compiler only understands complete buffering, not streaming.
*
* @param {Options | null | undefined} [options]
*/
function compiler(options) {
/** @type {Config} */
const config = {
transforms: [],
canContainEols: ['emphasis', 'fragment', 'heading', 'paragraph', 'strong'],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list, onenterlistordered),
listUnordered: opener(list),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
}
configure(config, (options || {}).mdastExtensions || [])
/** @type {CompileData} */
const data = {}
return compile
/**
* Turn micromark events into an mdast tree.
*
* @param {Array<Event>} events
* Events.
* @returns {Root}
* mdast tree.
*/
function compile(events) {
/** @type {Root} */
let tree = {
type: 'root',
children: []
}
/** @type {Omit<CompileContext, 'sliceSerialize'>} */
const context = {
stack: [tree],
tokenStack: [],
config,
enter,
exit,
buffer,
resume,
setData,
getData
}
/** @type {Array<number>} */
const listStack = []
let index = -1
while (++index < events.length) {
// We preprocess lists to add `listItem` tokens, and to infer whether
// items and the list itself are spread out.
if (
events[index][1].type === 'listOrdered' ||
events[index][1].type === 'listUnordered'
) {
if (events[index][0] === 'enter') {
listStack.push(index)
} else {
const tail = listStack.pop()
index = prepareList(events, tail, index)
}
}
}
index = -1
while (++index < events.length) {
const handler = config[events[index][0]]
if (lib_own.call(handler, events[index][1].type)) {
handler[events[index][1].type].call(
Object.assign(
{
sliceSerialize: events[index][2].sliceSerialize
},
context
),
events[index][1]
)
}
}
// Handle tokens still being open.
if (context.tokenStack.length > 0) {
const tail = context.tokenStack[context.tokenStack.length - 1]
const handler = tail[1] || defaultOnError
handler.call(context, undefined, tail[0])
}
// Figure out `root` position.
tree.position = {
start: lib_point(
events.length > 0
? events[0][1].start
: {
line: 1,
column: 1,
offset: 0
}
),
end: lib_point(
events.length > 0
? events[events.length - 2][1].end
: {
line: 1,
column: 1,
offset: 0
}
)
}
// Call transforms.
index = -1
while (++index < config.transforms.length) {
tree = config.transforms[index](tree) || tree
}
return tree
}
/**
* @param {Array<Event>} events
* @param {number} start
* @param {number} length
* @returns {number}
*/
function prepareList(events, start, length) {
let index = start - 1
let containerBalance = -1
let listSpread = false
/** @type {Token | undefined} */
let listItem
/** @type {number | undefined} */
let lineIndex
/** @type {number | undefined} */
let firstBlankLineIndex
/** @type {boolean | undefined} */
let atMarker
while (++index <= length) {
const event = events[index]
if (
event[1].type === 'listUnordered' ||
event[1].type === 'listOrdered' ||
event[1].type === 'blockQuote'
) {
if (event[0] === 'enter') {
containerBalance++
} else {
containerBalance--
}
atMarker = undefined
} else if (event[1].type === 'lineEndingBlank') {
if (event[0] === 'enter') {
if (
listItem &&
!atMarker &&
!containerBalance &&
!firstBlankLineIndex
) {
firstBlankLineIndex = index
}
atMarker = undefined
}
} else if (
event[1].type === 'linePrefix' ||
event[1].type === 'listItemValue' ||
event[1].type === 'listItemMarker' ||
event[1].type === 'listItemPrefix' ||
event[1].type === 'listItemPrefixWhitespace'
) {
// Empty.
} else {
atMarker = undefined
}
if (
(!containerBalance &&
event[0] === 'enter' &&
event[1].type === 'listItemPrefix') ||
(containerBalance === -1 &&
event[0] === 'exit' &&
(event[1].type === 'listUnordered' ||
event[1].type === 'listOrdered'))
) {
if (listItem) {
let tailIndex = index
lineIndex = undefined
while (tailIndex--) {
const tailEvent = events[tailIndex]
if (
tailEvent[1].type === 'lineEnding' ||
tailEvent[1].type === 'lineEndingBlank'
) {
if (tailEvent[0] === 'exit') continue
if (lineIndex) {
events[lineIndex][1].type = 'lineEndingBlank'
listSpread = true
}
tailEvent[1].type = 'lineEnding'
lineIndex = tailIndex
} else if (
tailEvent[1].type === 'linePrefix' ||
tailEvent[1].type === 'blockQuotePrefix' ||
tailEvent[1].type === 'blockQuotePrefixWhitespace' ||
tailEvent[1].type === 'blockQuoteMarker' ||
tailEvent[1].type === 'listItemIndent'
) {
// Empty
} else {
break
}
}
if (
firstBlankLineIndex &&
(!lineIndex || firstBlankLineIndex < lineIndex)
) {
listItem._spread = true
}
// Fix position.
listItem.end = Object.assign(
{},
lineIndex ? events[lineIndex][1].start : event[1].end
)
events.splice(lineIndex || index, 0, ['exit', listItem, event[2]])
index++
length++
}
// Create a new list item.
if (event[1].type === 'listItemPrefix') {
listItem = {
type: 'listItem',
_spread: false,
start: Object.assign({}, event[1].start),
// @ts-expect-error: we’ll add `end` in a second.
end: undefined
}
// @ts-expect-error: `listItem` is most definitely defined, TS...
events.splice(index, 0, ['enter', listItem, event[2]])
index++
length++
firstBlankLineIndex = undefined
atMarker = true
}
}
}
events[start][1]._spread = listSpread
return length
}
/**
* Set data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @param {CompileData[Key]} [value]
* New value.
* @returns {void}
* Nothing.
*/
function setData(key, value) {
data[key] = value
}
/**
* Get data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @returns {CompileData[Key]}
* Value.
*/
function getData(key) {
return data[key]
}
/**
* Create an opener handle.
*
* @param {(token: Token) => Node} create
* Create a node.
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/
function opener(create, and) {
return open
/**
* @this {CompileContext}
* @param {Token} token
* @returns {void}
*/
function open(token) {
enter.call(this, create(token), token)
if (and) and.call(this, token)
}
}
/**
* @this {CompileContext}
* @returns {void}
*/
function buffer() {
this.stack.push({
type: 'fragment',
children: []
})
}
/**
* @template {Node} Kind
* Node type.
* @this {CompileContext}
* Context.
* @param {Kind} node
* Node to enter.
* @param {Token} token
* Corresponding token.
* @param {OnEnterError | undefined} [errorHandler]
* Handle the case where this token is open, but it is closed by something else.
* @returns {Kind}
* The given node.
*/
function enter(node, token, errorHandler) {
const parent = this.stack[this.stack.length - 1]
// @ts-expect-error: Assume `Node` can exist as a child of `parent`.
parent.children.push(node)
this.stack.push(node)
this.tokenStack.push([token, errorHandler])
// @ts-expect-error: `end` will be patched later.
node.position = {
start: lib_point(token.start)
}
return node
}
/**
* Create a closer handle.
*
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/
function closer(and) {
return close
/**
* @this {CompileContext}
* @param {Token} token
* @returns {void}
*/
function close(token) {
if (and) and.call(this, token)
exit.call(this, token)
}
}
/**
* @this {CompileContext}
* Context.
* @param {Token} token
* Corresponding token.
* @param {OnExitError | undefined} [onExitError]
* Handle the case where another token is open.
* @returns {Node}
* The closed node.
*/
function exit(token, onExitError) {
const node = this.stack.pop()
const open = this.tokenStack.pop()
if (!open) {
throw new Error(
'Cannot close `' +
token.type +
'` (' +
stringifyPosition({
start: token.start,
end: token.end
}) +
'): it’s not open'
)
} else if (open[0].type !== token.type) {
if (onExitError) {
onExitError.call(this, token, open[0])
} else {
const handler = open[1] || defaultOnError
handler.call(this, token, open[0])
}
}
node.position.end = lib_point(token.end)
return node
}
/**
* @this {CompileContext}
* @returns {string}
*/
function resume() {
return lib_toString(this.stack.pop())
}
//
// Handlers.
//
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistordered() {
setData('expectingFirstListItemValue', true)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistitemvalue(token) {
if (getData('expectingFirstListItemValue')) {
const ancestor = this.stack[this.stack.length - 2]
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10)
setData('expectingFirstListItemValue')
}
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfenceinfo() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.lang = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfencemeta() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.meta = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfence() {
// Exit if this is the closing fence.
if (getData('flowCodeInside')) return
this.buffer()
setData('flowCodeInside', true)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefenced() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, '')
setData('flowCodeInside')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodeindented() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data.replace(/(\r?\n|\r)$/g, '')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitionlabelstring(token) {
const label = this.resume()
const node = this.stack[this.stack.length - 1]
node.label = label
node.identifier = normalizeIdentifier(
this.sliceSerialize(token)
).toLowerCase()
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiontitlestring() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.title = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiondestinationstring() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.url = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitatxheadingsequence(token) {
const node = this.stack[this.stack.length - 1]
if (!node.depth) {
const depth = this.sliceSerialize(token).length
node.depth = depth
}
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadingtext() {
setData('setextHeadingSlurpLineEnding', true)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadinglinesequence(token) {
const node = this.stack[this.stack.length - 1]
node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 1 : 2
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheading() {
setData('setextHeadingSlurpLineEnding')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterdata(token) {
const node = this.stack[this.stack.length - 1]
let tail = node.children[node.children.length - 1]
if (!tail || tail.type !== 'text') {
// Add a new text node.
tail = text()
// @ts-expect-error: we’ll add `end` later.
tail.position = {
start: lib_point(token.start)
}
// @ts-expect-error: Assume `parent` accepts `text`.
node.children.push(tail)
}
this.stack.push(tail)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdata(token) {
const tail = this.stack.pop()
tail.value += this.sliceSerialize(token)
tail.position.end = lib_point(token.end)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlineending(token) {
const context = this.stack[this.stack.length - 1]
// If we’re at a hard break, include the line ending in there.
if (getData('atHardBreak')) {
const tail = context.children[context.children.length - 1]
tail.position.end = lib_point(token.end)
setData('atHardBreak')
return
}
if (
!getData('setextHeadingSlurpLineEnding') &&
config.canContainEols.includes(context.type)
) {
onenterdata.call(this, token)
onexitdata.call(this, token)
}
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithardbreak() {
setData('atHardBreak', true)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmlflow() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmltext() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodetext() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlink() {
const node = this.stack[this.stack.length - 1]
// Note: there are also `identifier` and `label` fields on this link node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete node.identifier
// @ts-expect-error: mutate.
delete node.label
}
setData('referenceType')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitimage() {
const node = this.stack[this.stack.length - 1]
// Note: there are also `identifier` and `label` fields on this image node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete node.identifier
// @ts-expect-error: mutate.
delete node.label
}
setData('referenceType')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabeltext(token) {
const string = this.sliceSerialize(token)
const ancestor = this.stack[this.stack.length - 2]
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
ancestor.label = decodeString(string)
// @ts-expect-error: same as above.
ancestor.identifier = normalizeIdentifier(string).toLowerCase()
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabel() {
const fragment = this.stack[this.stack.length - 1]
const value = this.resume()
const node = this.stack[this.stack.length - 1]
// Assume a reference.
setData('inReference', true)
if (node.type === 'link') {
/** @type {Array<StaticPhrasingContent>} */
// @ts-expect-error: Assume static phrasing content.
const children = fragment.children
node.children = children
} else {
node.alt = value
}
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcedestinationstring() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.url = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcetitlestring() {
const data = this.resume()
const node = this.stack[this.stack.length - 1]
node.title = data
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresource() {
setData('inReference')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterreference() {
setData('referenceType', 'collapsed')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitreferencestring(token) {
const label = this.resume()
const node = this.stack[this.stack.length - 1]
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
node.label = label
// @ts-expect-error: same as above.
node.identifier = normalizeIdentifier(
this.sliceSerialize(token)
).toLowerCase()
setData('referenceType', 'full')
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencemarker(token) {
setData('characterReferenceType', token.type)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencevalue(token) {
const data = this.sliceSerialize(token)
const type = getData('characterReferenceType')
/** @type {string} */
let value
if (type) {
value = decodeNumericCharacterReference(
data,
type === 'characterReferenceMarkerNumeric' ? 10 : 16
)
setData('characterReferenceType')
} else {
const result = decodeNamedCharacterReference(data)
value = result
}
const tail = this.stack.pop()
tail.value += value
tail.position.end = lib_point(token.end)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkprotocol(token) {
onexitdata.call(this, token)
const node = this.stack[this.stack.length - 1]
node.url = this.sliceSerialize(token)
}
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkemail(token) {
onexitdata.call(this, token)
const node = this.stack[this.stack.length - 1]
node.url = 'mailto:' + this.sliceSerialize(token)
}
//
// Creators.
//
/** @returns {Blockquote} */
function blockQuote() {
return {
type: 'blockquote',
children: []
}
}
/** @returns {Code} */
function codeFlow() {
return {
type: 'code',
lang: null,
meta: null,
value: ''
}
}
/** @returns {InlineCode} */
function codeText() {
return {
type: 'inlineCode',
value: ''
}
}
/** @returns {Definition} */
function definition() {
return {
type: 'definition',
identifier: '',
label: null,
title: null,
url: ''
}
}
/** @returns {Emphasis} */
function emphasis() {
return {
type: 'emphasis',
children: []
}
}
/** @returns {Heading} */
function heading() {
// @ts-expect-error `depth` will be set later.
return {
type: 'heading',
depth: undefined,
children: []
}
}
/** @returns {Break} */
function hardBreak() {
return {
type: 'break'
}
}
/** @returns {HTML} */
function html() {
return {
type: 'html',
value: ''
}
}
/** @returns {Image} */
function image() {
return {
type: 'image',
title: null,
url: '',
alt: null
}
}
/** @returns {Link} */
function link() {
return {
type: 'link',
title: null,
url: '',
children: []
}
}
/**
* @param {Token} token
* @returns {List}
*/
function list(token) {
return {
type: 'list',
ordered: token.type === 'listOrdered',
start: null,
spread: token._spread,
children: []
}
}
/**
* @param {Token} token
* @returns {ListItem}
*/
function listItem(token) {
return {
type: 'listItem',
spread: token._spread,
checked: null,
children: []
}
}
/** @returns {Paragraph} */
function paragraph() {
return {
type: 'paragraph',
children: []
}
}
/** @returns {Strong} */
function strong() {
return {
type: 'strong',
children: []
}
}
/** @returns {Text} */
function text() {
return {
type: 'text',
value: ''
}
}
/** @returns {ThematicBreak} */
function thematicBreak() {
return {
type: 'thematicBreak'
}
}
}
/**
* Copy a point-like value.
*
* @param {Point} d
* Point-like value.
* @returns {Point}
* unist point.
*/
function lib_point(d) {
return {
line: d.line,
column: d.column,
offset: d.offset
}
}
/**
* @param {Config} combined
 * @param {Array<Extension | Array<Extension>>} extensions
* @returns {void}
*/
function configure(combined, extensions) {
let index = -1
while (++index < extensions.length) {
const value = extensions[index]
if (Array.isArray(value)) {
configure(combined, value)
} else {
extension(combined, value)
}
}
}
/**
* @param {Config} combined
* @param {Extension} extension
* @returns {void}
*/
function extension(combined, extension) {
/** @type {keyof Extension} */
let key
for (key in extension) {
if (lib_own.call(extension, key)) {
if (key === 'canContainEols') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'transforms') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'enter' || key === 'exit') {
const right = extension[key]
if (right) {
Object.assign(combined[key], right)
}
}
}
}
}
/** @type {OnEnterError} */
function defaultOnError(left, right) {
if (left) {
throw new Error(
'Cannot close `' +
left.type +
'` (' +
stringifyPosition({
start: left.start,
end: left.end
}) +
'): a different token (`' +
right.type +
'`, ' +
stringifyPosition({
start: right.start,
end: right.end
}) +
') is open'
)
} else {
throw new Error(
'Cannot close document, a token (`' +
right.type +
'`, ' +
stringifyPosition({
start: right.start,
end: right.end
}) +
') is still open'
)
}
}
// EXTERNAL MODULE: ./node_modules/ts-dedent/esm/index.js
var esm = __webpack_require__(18464);
;// CONCATENATED MODULE: ./node_modules/mermaid/dist/createText-aebacdfe.js
function preprocessMarkdown(markdown) {
const withoutMultipleNewlines = markdown.replace(/\n{2,}/g, "\n");
const withoutExtraSpaces = (0,esm/* dedent */.Z)(withoutMultipleNewlines);
return withoutExtraSpaces;
}
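/**
 * Convert markdown into a line/word structure for SVG rendering.
 * Each outer array entry is one output line; each word keeps the type of its
 * closest `strong`/`emphasis` ancestor. For example:
 *
 *   markdownToLines("Hello **bold** text")
 *   // => [[{ content: "Hello", type: "normal" },
 *   //       { content: "bold", type: "strong" },
 *   //       { content: "text", type: "normal" }]]
 */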
function markdownToLines(markdown) {
const preprocessedMarkdown = preprocessMarkdown(markdown);
const { children } = fromMarkdown(preprocessedMarkdown);
const lines = [[]];
let currentLine = 0;
function processNode(node, parentType = "normal") {
if (node.type === "text") {
const textLines = node.value.split("\n");
textLines.forEach((textLine, index) => {
if (index !== 0) {
currentLine++;
lines.push([]);
}
textLine.split(" ").forEach((word) => {
if (word) {
lines[currentLine].push({ content: word, type: parentType });
}
});
});
} else if (node.type === "strong" || node.type === "emphasis") {
node.children.forEach((contentNode) => {
processNode(contentNode, node.type);
});
}
}
children.forEach((treeNode) => {
if (treeNode.type === "paragraph") {
treeNode.children.forEach((contentNode) => {
processNode(contentNode);
});
}
});
return lines;
}
function markdownToHTML(markdown) {
const { children } = fromMarkdown(markdown);
function output(node) {
if (node.type === "text") {
      return node.value.replace(/\n/g, "<br/>");
    } else if (node.type === "strong") {
      return `<strong>${node.children.map(output).join("")}</strong>`;
    } else if (node.type === "emphasis") {
      return `<em>${node.children.map(output).join("")}</em>`;
    } else if (node.type === "paragraph") {
      return `<p>${node.children.map(output).join("")}</p>`;
}
return `Unsupported markdown: ${node.type}`;
}
return children.map(output).join("");
}
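/**
 * Split a string into user-perceived characters. `Intl.Segmenter` keeps
 * grapheme clusters (e.g. emoji built from several code points) intact,
 * while the spread fallback splits on code points only.
 *
 *   splitTextToChars("ab") // => ["a", "b"]
 */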
function splitTextToChars(text) {
if (Intl.Segmenter) {
return [...new Intl.Segmenter().segment(text)].map((s) => s.segment);
}
return [...text];
}
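/**
 * Split a single word into the longest prefix that satisfies `checkFit` plus
 * the remainder. With a hypothetical `checkFit` that accepts at most three
 * characters:
 *
 *   splitWordToFitWidth(checkFit, { content: "hello", type: "normal" })
 *   // => [{ content: "hel", type: "normal" }, { content: "lo", type: "normal" }]
 *
 * At least one character is always placed on the first part so rendering can
 * make progress even when nothing fits.
 */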
function splitWordToFitWidth(checkFit, word) {
const characters = splitTextToChars(word.content);
return splitWordToFitWidthRecursion(checkFit, [], characters, word.type);
}
function splitWordToFitWidthRecursion(checkFit, usedChars, remainingChars, type) {
if (remainingChars.length === 0) {
return [
{ content: usedChars.join(""), type },
{ content: "", type }
];
}
const [nextChar, ...rest] = remainingChars;
const newWord = [...usedChars, nextChar];
if (checkFit([{ content: newWord.join(""), type }])) {
return splitWordToFitWidthRecursion(checkFit, newWord, rest, type);
}
if (usedChars.length === 0 && nextChar) {
usedChars.push(nextChar);
remainingChars.shift();
}
return [
{ content: usedChars.join(""), type },
{ content: remainingChars.join(""), type }
];
}
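/**
 * Greedily wrap a line of words into as many lines as `checkFit` requires;
 * words that do not fit on their own are broken via splitWordToFitWidth.
 * With a hypothetical `checkFit` that accepts at most two words:
 *
 *   splitLineToFitWidth([lorem, ipsum, dolor], checkFit)
 *   // => [[lorem, ipsum], [dolor]]
 */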
function splitLineToFitWidth(line, checkFit) {
if (line.some(({ content }) => content.includes("\n"))) {
throw new Error("splitLineToFitWidth does not support newlines in the line");
}
return splitLineToFitWidthRecursion(line, checkFit);
}
function splitLineToFitWidthRecursion(words, checkFit, lines = [], newLine = []) {
if (words.length === 0) {
if (newLine.length > 0) {
lines.push(newLine);
}
return lines.length > 0 ? lines : [];
}
let joiner = "";
if (words[0].content === " ") {
joiner = " ";
words.shift();
}
const nextWord = words.shift() ?? { content: " ", type: "normal" };
const lineWithNextWord = [...newLine];
if (joiner !== "") {
lineWithNextWord.push({ content: joiner, type: "normal" });
}
lineWithNextWord.push(nextWord);
if (checkFit(lineWithNextWord)) {
return splitLineToFitWidthRecursion(words, checkFit, lines, lineWithNextWord);
}
if (newLine.length > 0) {
lines.push(newLine);
words.unshift(nextWord);
} else if (nextWord.content) {
const [line, rest] = splitWordToFitWidth(checkFit, nextWord);
lines.push([line]);
if (rest.content) {
words.unshift(rest);
}
}
return splitLineToFitWidthRecursion(words, checkFit, lines);
}
function applyStyle(dom, styleFn) {
if (styleFn) {
dom.attr("style", styleFn);
}
}
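/**
 * Render an HTML label inside a <foreignObject>. The div starts as a
 * non-wrapping table-cell; if its measured width reaches the `width` limit it
 * is switched to `table`/`break-spaces` so the text wraps, and the
 * foreignObject is sized to the resulting bounding box.
 */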
function addHtmlSpan(element, node, width, classes, addBackground = false) {
const fo = element.append("foreignObject");
const div = fo.append("xhtml:div");
const label = node.label;
const labelClass = node.isNode ? "nodeLabel" : "edgeLabel";
div.html(
    `<span class="${labelClass} ${classes}" ` + (node.labelStyle ? 'style="' + node.labelStyle + '"' : "") + ">" + label + "</span>"
);
applyStyle(div, node.labelStyle);
div.style("display", "table-cell");
div.style("white-space", "nowrap");
div.style("max-width", width + "px");
div.attr("xmlns", "http://www.w3.org/1999/xhtml");
if (addBackground) {
div.attr("class", "labelBkg");
}
let bbox = div.node().getBoundingClientRect();
if (bbox.width === width) {
div.style("display", "table");
div.style("white-space", "break-spaces");
div.style("width", width + "px");
bbox = div.node().getBoundingClientRect();
}
fo.style("width", bbox.width);
fo.style("height", bbox.height);
return fo.node();
}
function createTspan(textElement, lineIndex, lineHeight) {
return textElement.append("tspan").attr("class", "text-outer-tspan").attr("x", 0).attr("y", lineIndex * lineHeight - 0.1 + "em").attr("dy", lineHeight + "em");
}
function computeWidthOfText(parentNode, lineHeight, line) {
const testElement = parentNode.append("text");
const testSpan = createTspan(testElement, 1, lineHeight);
updateTextContentAndStyles(testSpan, line);
const textLength = testSpan.node().getComputedTextLength();
testElement.remove();
return textLength;
}
function computeDimensionOfText(parentNode, lineHeight, text) {
var _a;
const testElement = parentNode.append("text");
const testSpan = createTspan(testElement, 1, lineHeight);
updateTextContentAndStyles(testSpan, [{ content: text, type: "normal" }]);
const textDimension = (_a = testSpan.node()) == null ? void 0 : _a.getBoundingClientRect();
if (textDimension) {
testElement.remove();
}
return textDimension;
}
function createFormattedText(width, g, structuredText, addBackground = false) {
const lineHeight = 1.1;
const labelGroup = g.append("g");
const bkg = labelGroup.insert("rect").attr("class", "background");
const textElement = labelGroup.append("text").attr("y", "-10.1");
let lineIndex = 0;
for (const line of structuredText) {
const checkWidth = (line2) => computeWidthOfText(labelGroup, lineHeight, line2) <= width;
const linesUnderWidth = checkWidth(line) ? [line] : splitLineToFitWidth(line, checkWidth);
for (const preparedLine of linesUnderWidth) {
const tspan = createTspan(textElement, lineIndex, lineHeight);
updateTextContentAndStyles(tspan, preparedLine);
lineIndex++;
}
}
if (addBackground) {
const bbox = textElement.node().getBBox();
const padding = 2;
bkg.attr("x", -padding).attr("y", -padding).attr("width", bbox.width + 2 * padding).attr("height", bbox.height + 2 * padding);
return labelGroup.node();
} else {
return textElement.node();
}
}
function updateTextContentAndStyles(tspan, wrappedLine) {
tspan.text("");
wrappedLine.forEach((word, index) => {
const innerTspan = tspan.append("tspan").attr("font-style", word.type === "emphasis" ? "italic" : "normal").attr("class", "text-inner-tspan").attr("font-weight", word.type === "strong" ? "bold" : "normal");
if (index === 0) {
innerTspan.text(word.content);
} else {
innerTspan.text(" " + word.content);
}
});
}
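/**
 * Create a label element from (possibly markdown) text. With `useHtmlLabels`
 * the label becomes an HTML <span> inside a <foreignObject>; otherwise it is
 * built from SVG <text>/<tspan> elements wrapped to `width`.
 *
 * Illustrative use only (assumes `group` is a d3 selection of an SVG <g>):
 *
 *   const labelEl = createText(group, "**bold** _text_", { useHtmlLabels: false, width: 120 });
 */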
const createText = (el, text = "", {
style = "",
isTitle = false,
classes = "",
useHtmlLabels = true,
isNode = true,
width = 200,
addSvgBackground = false
} = {}) => {
mermaid_934d9bea.l.info("createText", text, style, isTitle, classes, useHtmlLabels, isNode, addSvgBackground);
if (useHtmlLabels) {
const htmlText = markdownToHTML(text);
const node = {
isNode,
label: (0,mermaid_934d9bea.J)(htmlText).replace(
/fa[blrs]?:fa-[\w-]+/g,
      (s) => `<i class='${s.replace(":", " ")}'></i>`
),
labelStyle: style.replace("fill:", "color:")
};
const vertexNode = addHtmlSpan(el, node, width, classes, addSvgBackground);
return vertexNode;
} else {
const structuredText = markdownToLines(text);
const svgLabel = createFormattedText(width, el, structuredText, addSvgBackground);
return svgLabel;
}
};
/***/ }),
/***/ 25269:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ a: () => (/* binding */ insertMarkers$1),
/* harmony export */ b: () => (/* binding */ clear$1),
/* harmony export */ c: () => (/* binding */ createLabel$1),
/* harmony export */ d: () => (/* binding */ clear),
/* harmony export */ e: () => (/* binding */ insertNode),
/* harmony export */ f: () => (/* binding */ insertEdgeLabel),
/* harmony export */ g: () => (/* binding */ insertEdge),
/* harmony export */ h: () => (/* binding */ positionEdgeLabel),
/* harmony export */ i: () => (/* binding */ intersectRect$1),
/* harmony export */ j: () => (/* binding */ getLineFunctionsWithOffset),
/* harmony export */ l: () => (/* binding */ labelHelper),
/* harmony export */ p: () => (/* binding */ positionNode),
/* harmony export */ s: () => (/* binding */ setNodeElem),
/* harmony export */ u: () => (/* binding */ updateNodeBounds)
/* harmony export */ });
/* harmony import */ var _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(85322);
/* harmony import */ var d3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(64218);
/* harmony import */ var _createText_aebacdfe_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(54511);
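/**
 * Arrow/relationship marker defs are registered per diagram: each factory
 * below appends <marker> elements with ids of the form
 * `${id}_${type}-<markerName>Start|End`. For example,
 * insertMarkers(svg, ["point"], "flowchart", "graph-div") creates
 * "graph-div_flowchart-pointStart" and "graph-div_flowchart-pointEnd".
 */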
const insertMarkers = (elem, markerArray, type, id) => {
markerArray.forEach((markerName) => {
markers[markerName](elem, type, id);
});
};
const extension = (elem, type, id) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.trace("Making markers for ", id);
elem.append("defs").append("marker").attr("id", id + "_" + type + "-extensionStart").attr("class", "marker extension " + type).attr("refX", 18).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("path").attr("d", "M 1,7 L18,13 V 1 Z");
elem.append("defs").append("marker").attr("id", id + "_" + type + "-extensionEnd").attr("class", "marker extension " + type).attr("refX", 1).attr("refY", 7).attr("markerWidth", 20).attr("markerHeight", 28).attr("orient", "auto").append("path").attr("d", "M 1,1 V 13 L18,7 Z");
};
const composition = (elem, type, id) => {
elem.append("defs").append("marker").attr("id", id + "_" + type + "-compositionStart").attr("class", "marker composition " + type).attr("refX", 18).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("path").attr("d", "M 18,7 L9,13 L1,7 L9,1 Z");
elem.append("defs").append("marker").attr("id", id + "_" + type + "-compositionEnd").attr("class", "marker composition " + type).attr("refX", 1).attr("refY", 7).attr("markerWidth", 20).attr("markerHeight", 28).attr("orient", "auto").append("path").attr("d", "M 18,7 L9,13 L1,7 L9,1 Z");
};
const aggregation = (elem, type, id) => {
elem.append("defs").append("marker").attr("id", id + "_" + type + "-aggregationStart").attr("class", "marker aggregation " + type).attr("refX", 18).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("path").attr("d", "M 18,7 L9,13 L1,7 L9,1 Z");
elem.append("defs").append("marker").attr("id", id + "_" + type + "-aggregationEnd").attr("class", "marker aggregation " + type).attr("refX", 1).attr("refY", 7).attr("markerWidth", 20).attr("markerHeight", 28).attr("orient", "auto").append("path").attr("d", "M 18,7 L9,13 L1,7 L9,1 Z");
};
const dependency = (elem, type, id) => {
elem.append("defs").append("marker").attr("id", id + "_" + type + "-dependencyStart").attr("class", "marker dependency " + type).attr("refX", 6).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("path").attr("d", "M 5,7 L9,13 L1,7 L9,1 Z");
elem.append("defs").append("marker").attr("id", id + "_" + type + "-dependencyEnd").attr("class", "marker dependency " + type).attr("refX", 13).attr("refY", 7).attr("markerWidth", 20).attr("markerHeight", 28).attr("orient", "auto").append("path").attr("d", "M 18,7 L9,13 L14,7 L9,1 Z");
};
const lollipop = (elem, type, id) => {
elem.append("defs").append("marker").attr("id", id + "_" + type + "-lollipopStart").attr("class", "marker lollipop " + type).attr("refX", 13).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("circle").attr("stroke", "black").attr("fill", "transparent").attr("cx", 7).attr("cy", 7).attr("r", 6);
elem.append("defs").append("marker").attr("id", id + "_" + type + "-lollipopEnd").attr("class", "marker lollipop " + type).attr("refX", 1).attr("refY", 7).attr("markerWidth", 190).attr("markerHeight", 240).attr("orient", "auto").append("circle").attr("stroke", "black").attr("fill", "transparent").attr("cx", 7).attr("cy", 7).attr("r", 6);
};
const point = (elem, type, id) => {
elem.append("marker").attr("id", id + "_" + type + "-pointEnd").attr("class", "marker " + type).attr("viewBox", "0 0 10 10").attr("refX", 6).attr("refY", 5).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 12).attr("markerHeight", 12).attr("orient", "auto").append("path").attr("d", "M 0 0 L 10 5 L 0 10 z").attr("class", "arrowMarkerPath").style("stroke-width", 1).style("stroke-dasharray", "1,0");
elem.append("marker").attr("id", id + "_" + type + "-pointStart").attr("class", "marker " + type).attr("viewBox", "0 0 10 10").attr("refX", 4.5).attr("refY", 5).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 12).attr("markerHeight", 12).attr("orient", "auto").append("path").attr("d", "M 0 5 L 10 10 L 10 0 z").attr("class", "arrowMarkerPath").style("stroke-width", 1).style("stroke-dasharray", "1,0");
};
const circle$1 = (elem, type, id) => {
elem.append("marker").attr("id", id + "_" + type + "-circleEnd").attr("class", "marker " + type).attr("viewBox", "0 0 10 10").attr("refX", 11).attr("refY", 5).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 11).attr("markerHeight", 11).attr("orient", "auto").append("circle").attr("cx", "5").attr("cy", "5").attr("r", "5").attr("class", "arrowMarkerPath").style("stroke-width", 1).style("stroke-dasharray", "1,0");
elem.append("marker").attr("id", id + "_" + type + "-circleStart").attr("class", "marker " + type).attr("viewBox", "0 0 10 10").attr("refX", -1).attr("refY", 5).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 11).attr("markerHeight", 11).attr("orient", "auto").append("circle").attr("cx", "5").attr("cy", "5").attr("r", "5").attr("class", "arrowMarkerPath").style("stroke-width", 1).style("stroke-dasharray", "1,0");
};
const cross = (elem, type, id) => {
elem.append("marker").attr("id", id + "_" + type + "-crossEnd").attr("class", "marker cross " + type).attr("viewBox", "0 0 11 11").attr("refX", 12).attr("refY", 5.2).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 11).attr("markerHeight", 11).attr("orient", "auto").append("path").attr("d", "M 1,1 l 9,9 M 10,1 l -9,9").attr("class", "arrowMarkerPath").style("stroke-width", 2).style("stroke-dasharray", "1,0");
elem.append("marker").attr("id", id + "_" + type + "-crossStart").attr("class", "marker cross " + type).attr("viewBox", "0 0 11 11").attr("refX", -1).attr("refY", 5.2).attr("markerUnits", "userSpaceOnUse").attr("markerWidth", 11).attr("markerHeight", 11).attr("orient", "auto").append("path").attr("d", "M 1,1 l 9,9 M 10,1 l -9,9").attr("class", "arrowMarkerPath").style("stroke-width", 2).style("stroke-dasharray", "1,0");
};
const barb = (elem, type, id) => {
elem.append("defs").append("marker").attr("id", id + "_" + type + "-barbEnd").attr("refX", 19).attr("refY", 7).attr("markerWidth", 20).attr("markerHeight", 14).attr("markerUnits", "strokeWidth").attr("orient", "auto").append("path").attr("d", "M 19,7 L9,13 L14,7 L9,1 Z");
};
const markers = {
extension,
composition,
aggregation,
dependency,
lollipop,
point,
circle: circle$1,
cross,
barb
};
const insertMarkers$1 = insertMarkers;
function applyStyle(dom, styleFn) {
if (styleFn) {
dom.attr("style", styleFn);
}
}
function addHtmlLabel(node) {
const fo = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(document.createElementNS("http://www.w3.org/2000/svg", "foreignObject"));
const div = fo.append("xhtml:div");
const label = node.label;
const labelClass = node.isNode ? "nodeLabel" : "edgeLabel";
div.html(
'" + label + ""
);
applyStyle(div, node.labelStyle);
div.style("display", "inline-block");
div.style("white-space", "nowrap");
div.attr("xmlns", "http://www.w3.org/1999/xhtml");
return fo.node();
}
const createLabel = (_vertexText, style, isTitle, isNode) => {
let vertexText = _vertexText || "";
if (typeof vertexText === "object") {
vertexText = vertexText[0];
}
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
    vertexText = vertexText.replace(/\\n|\n/g, "<br />");
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("vertexText" + vertexText);
const node = {
isNode,
label: (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.J)(vertexText).replace(
/fa[blrs]?:fa-[\w-]+/g,
        (s) => `<i class='${s.replace(":", " ")}'></i>`
),
labelStyle: style.replace("fill:", "color:")
};
let vertexNode = addHtmlLabel(node);
return vertexNode;
} else {
const svgLabel = document.createElementNS("http://www.w3.org/2000/svg", "text");
svgLabel.setAttribute("style", style.replace("color:", "fill:"));
let rows = [];
if (typeof vertexText === "string") {
      rows = vertexText.split(/\\n|\n|<br\s*\/?>/gi);
} else if (Array.isArray(vertexText)) {
rows = vertexText;
} else {
rows = [];
}
for (const row of rows) {
const tspan = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
tspan.setAttributeNS("http://www.w3.org/XML/1998/namespace", "xml:space", "preserve");
tspan.setAttribute("dy", "1em");
tspan.setAttribute("x", "0");
if (isTitle) {
tspan.setAttribute("class", "title-row");
} else {
tspan.setAttribute("class", "row");
}
tspan.textContent = row.trim();
svgLabel.appendChild(tspan);
}
return svgLabel;
}
};
const createLabel$1 = createLabel;
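/**
 * Shared label setup for all shapes: creates the outer shape <g>, renders the
 * label (markdown-aware via createText, otherwise via createLabel$1), waits
 * for any <img> inside HTML labels to load so the bounding box is correct,
 * translates the label toward the node centre, and returns
 * { shapeSvg, bbox, halfPadding, label }.
 */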
const labelHelper = async (parent, node, _classes, isNode) => {
let classes;
const useHtmlLabels = node.useHtmlLabels || (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels);
if (!_classes) {
classes = "node default";
} else {
classes = _classes;
}
const shapeSvg = parent.insert("g").attr("class", classes).attr("id", node.domId || node.id);
const label = shapeSvg.insert("g").attr("class", "label").attr("style", node.labelStyle);
let labelText;
if (node.labelText === void 0) {
labelText = "";
} else {
labelText = typeof node.labelText === "string" ? node.labelText : node.labelText[0];
}
const textNode = label.node();
let text;
if (node.labelType === "markdown") {
text = (0,_createText_aebacdfe_js__WEBPACK_IMPORTED_MODULE_2__.a)(label, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.d)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.J)(labelText), (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)()), {
useHtmlLabels,
width: node.width || (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.wrappingWidth,
classes: "markdown-node-label"
});
} else {
text = textNode.appendChild(
createLabel$1(
(0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.d)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.J)(labelText), (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)()),
node.labelStyle,
false,
isNode
)
);
}
let bbox = text.getBBox();
const halfPadding = node.padding / 2;
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = text.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(text);
const images = div.getElementsByTagName("img");
if (images) {
      const noImgText = labelText.replace(/<img[^>]*>/g, "").trim() === "";
await Promise.all(
[...images].map(
(img) => new Promise((res) => {
function setupImage() {
img.style.display = "flex";
img.style.flexDirection = "column";
if (noImgText) {
const bodyFontSize = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().fontSize ? (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().fontSize : window.getComputedStyle(document.body).fontSize;
const enlargingFactor = 5;
img.style.width = parseInt(bodyFontSize, 10) * enlargingFactor + "px";
} else {
img.style.width = "100%";
}
res(img);
}
setTimeout(() => {
if (img.complete) {
setupImage();
}
});
img.addEventListener("error", setupImage);
img.addEventListener("load", setupImage);
})
)
);
}
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
if (useHtmlLabels) {
label.attr("transform", "translate(" + -bbox.width / 2 + ", " + -bbox.height / 2 + ")");
} else {
label.attr("transform", "translate(0, " + -bbox.height / 2 + ")");
}
if (node.centerLabel) {
label.attr("transform", "translate(" + -bbox.width / 2 + ", " + -bbox.height / 2 + ")");
}
label.insert("rect", ":first-child");
return { shapeSvg, bbox, halfPadding, label };
};
const updateNodeBounds = (node, element) => {
const bbox = element.node().getBBox();
node.width = bbox.width;
node.height = bbox.height;
};
function insertPolygonShape(parent, w, h, points) {
return parent.insert("polygon", ":first-child").attr(
"points",
points.map(function(d) {
return d.x + "," + d.y;
}).join(" ")
).attr("class", "label-container").attr("transform", "translate(" + -w / 2 + "," + h / 2 + ")");
}
function intersectNode(node, point2) {
return node.intersect(point2);
}
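/**
 * Intersect the ellipse centred on the node (radii rx, ry) with the segment
 * from the node centre to `point2`:
 * dx = |rx * ry * px| / sqrt(rx^2 * py^2 + ry^2 * px^2), and analogously for
 * dy, with signs chosen so the result lies on the side of the centre that
 * faces `point2`.
 */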
function intersectEllipse(node, rx, ry, point2) {
var cx = node.x;
var cy = node.y;
var px = cx - point2.x;
var py = cy - point2.y;
var det = Math.sqrt(rx * rx * py * py + ry * ry * px * px);
var dx = Math.abs(rx * ry * px / det);
if (point2.x < cx) {
dx = -dx;
}
var dy = Math.abs(rx * ry * py / det);
if (point2.y < cy) {
dy = -dy;
}
return { x: cx + dx, y: cy + dy };
}
function intersectCircle(node, rx, point2) {
return intersectEllipse(node, rx, rx, point2);
}
function intersectLine(p1, p2, q1, q2) {
var a1, a2, b1, b2, c1, c2;
var r1, r2, r3, r4;
var denom, offset, num;
var x, y;
a1 = p2.y - p1.y;
b1 = p1.x - p2.x;
c1 = p2.x * p1.y - p1.x * p2.y;
r3 = a1 * q1.x + b1 * q1.y + c1;
r4 = a1 * q2.x + b1 * q2.y + c1;
if (r3 !== 0 && r4 !== 0 && sameSign(r3, r4)) {
return;
}
a2 = q2.y - q1.y;
b2 = q1.x - q2.x;
c2 = q2.x * q1.y - q1.x * q2.y;
r1 = a2 * p1.x + b2 * p1.y + c2;
r2 = a2 * p2.x + b2 * p2.y + c2;
if (r1 !== 0 && r2 !== 0 && sameSign(r1, r2)) {
return;
}
denom = a1 * b2 - a2 * b1;
if (denom === 0) {
return;
}
offset = Math.abs(denom / 2);
num = b1 * c2 - b2 * c1;
x = num < 0 ? (num - offset) / denom : (num + offset) / denom;
num = a2 * c1 - a1 * c2;
y = num < 0 ? (num - offset) / denom : (num + offset) / denom;
return { x, y };
}
function sameSign(r1, r2) {
return r1 * r2 > 0;
}
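/**
 * Intersect the node's polygon outline with the segment from the node centre
 * to `point2`: each polygon edge (shifted so the polygon is centred on the
 * node) is tested with intersectLine, and the intersection closest to
 * `point2` is returned; if there is no intersection, the node itself is
 * returned as a fallback.
 */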
function intersectPolygon(node, polyPoints, point2) {
var x1 = node.x;
var y1 = node.y;
var intersections = [];
var minX = Number.POSITIVE_INFINITY;
var minY = Number.POSITIVE_INFINITY;
if (typeof polyPoints.forEach === "function") {
polyPoints.forEach(function(entry) {
minX = Math.min(minX, entry.x);
minY = Math.min(minY, entry.y);
});
} else {
minX = Math.min(minX, polyPoints.x);
minY = Math.min(minY, polyPoints.y);
}
var left = x1 - node.width / 2 - minX;
var top = y1 - node.height / 2 - minY;
for (var i = 0; i < polyPoints.length; i++) {
var p1 = polyPoints[i];
var p2 = polyPoints[i < polyPoints.length - 1 ? i + 1 : 0];
var intersect2 = intersectLine(
node,
point2,
{ x: left + p1.x, y: top + p1.y },
{ x: left + p2.x, y: top + p2.y }
);
if (intersect2) {
intersections.push(intersect2);
}
}
if (!intersections.length) {
return node;
}
if (intersections.length > 1) {
intersections.sort(function(p, q) {
var pdx = p.x - point2.x;
var pdy = p.y - point2.y;
var distp = Math.sqrt(pdx * pdx + pdy * pdy);
var qdx = q.x - point2.x;
var qdy = q.y - point2.y;
var distq = Math.sqrt(qdx * qdx + qdy * qdy);
return distp < distq ? -1 : distp === distq ? 0 : 1;
});
}
return intersections[0];
}
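/**
 * Intersect the node's bounding rectangle with the ray from the node centre
 * towards `point2`. Comparing |dy| * w with |dx| * h decides whether the ray
 * leaves through a horizontal or a vertical edge; the other coordinate is
 * scaled proportionally along the ray.
 */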
const intersectRect = (node, point2) => {
var x = node.x;
var y = node.y;
var dx = point2.x - x;
var dy = point2.y - y;
var w = node.width / 2;
var h = node.height / 2;
var sx, sy;
if (Math.abs(dy) * w > Math.abs(dx) * h) {
if (dy < 0) {
h = -h;
}
sx = dy === 0 ? 0 : h * dx / dy;
sy = h;
} else {
if (dx < 0) {
w = -w;
}
sx = w;
sy = dx === 0 ? 0 : w * dy / dx;
}
return { x: x + sx, y: y + sy };
};
const intersectRect$1 = intersectRect;
const intersect = {
node: intersectNode,
circle: intersectCircle,
ellipse: intersectEllipse,
polygon: intersectPolygon,
rect: intersectRect$1
};
const note = async (parent, node) => {
const useHtmlLabels = node.useHtmlLabels || (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels;
if (!useHtmlLabels) {
node.centerLabel = true;
}
const { shapeSvg, bbox, halfPadding } = await labelHelper(
parent,
node,
"node " + node.classes,
true
);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Classes = ", node.classes);
const rect2 = shapeSvg.insert("rect", ":first-child");
rect2.attr("rx", node.rx).attr("ry", node.ry).attr("x", -bbox.width / 2 - halfPadding).attr("y", -bbox.height / 2 - halfPadding).attr("width", bbox.width + node.padding).attr("height", bbox.height + node.padding);
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const note$1 = note;
const formatClass = (str) => {
if (str) {
return " " + str;
}
return "";
};
const getClassesFromNode = (node, otherClasses) => {
return `${otherClasses ? otherClasses : "node default"}${formatClass(node.classes)} ${formatClass(
node.class
)}`;
};
const question = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const s = w + h;
const points = [
{ x: s / 2, y: 0 },
{ x: s, y: -s / 2 },
{ x: s / 2, y: -s },
{ x: 0, y: -s / 2 }
];
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Question main (Circle)");
const questionElem = insertPolygonShape(shapeSvg, s, s, points);
questionElem.attr("style", node.style);
updateNodeBounds(node, questionElem);
node.intersect = function(point2) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("Intersect called");
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const choice = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", "node default").attr("id", node.domId || node.id);
const s = 28;
const points = [
{ x: 0, y: s / 2 },
{ x: s / 2, y: 0 },
{ x: 0, y: -s / 2 },
{ x: -s / 2, y: 0 }
];
const choice2 = shapeSvg.insert("polygon", ":first-child").attr(
"points",
points.map(function(d) {
return d.x + "," + d.y;
}).join(" ")
);
choice2.attr("class", "state-start").attr("r", 7).attr("width", 28).attr("height", 28);
node.width = 28;
node.height = 28;
node.intersect = function(point2) {
return intersect.circle(node, 14, point2);
};
return shapeSvg;
};
const hexagon = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const f = 4;
const h = bbox.height + node.padding;
const m = h / f;
const w = bbox.width + 2 * m + node.padding;
const points = [
{ x: m, y: 0 },
{ x: w - m, y: 0 },
{ x: w, y: -h / 2 },
{ x: w - m, y: -h },
{ x: m, y: -h },
{ x: 0, y: -h / 2 }
];
const hex = insertPolygonShape(shapeSvg, w, h, points);
hex.attr("style", node.style);
updateNodeBounds(node, hex);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const rect_left_inv_arrow = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: -h / 2, y: 0 },
{ x: w, y: 0 },
{ x: w, y: -h },
{ x: -h / 2, y: -h },
{ x: 0, y: -h / 2 }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
node.width = w + h;
node.height = h;
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const lean_right = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(parent, node, getClassesFromNode(node), true);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: -2 * h / 6, y: 0 },
{ x: w - h / 6, y: 0 },
{ x: w + 2 * h / 6, y: -h },
{ x: h / 6, y: -h }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const lean_left = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: 2 * h / 6, y: 0 },
{ x: w + h / 6, y: 0 },
{ x: w - 2 * h / 6, y: -h },
{ x: -h / 6, y: -h }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const trapezoid = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: -2 * h / 6, y: 0 },
{ x: w + 2 * h / 6, y: 0 },
{ x: w - h / 6, y: -h },
{ x: h / 6, y: -h }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const inv_trapezoid = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: h / 6, y: 0 },
{ x: w - h / 6, y: 0 },
{ x: w + 2 * h / 6, y: -h },
{ x: -2 * h / 6, y: -h }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const rect_right_inv_arrow = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: 0, y: 0 },
{ x: w + h / 2, y: 0 },
{ x: w, y: -h / 2 },
{ x: w + h / 2, y: -h },
{ x: 0, y: -h }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const cylinder = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const rx = w / 2;
const ry = rx / (2.5 + w / 50);
const h = bbox.height + ry + node.padding;
const shape = "M 0," + ry + " a " + rx + "," + ry + " 0,0,0 " + w + " 0 a " + rx + "," + ry + " 0,0,0 " + -w + " 0 l 0," + h + " a " + rx + "," + ry + " 0,0,0 " + w + " 0 l 0," + -h;
const el = shapeSvg.attr("label-offset-y", ry).insert("path", ":first-child").attr("style", node.style).attr("d", shape).attr("transform", "translate(" + -w / 2 + "," + -(h / 2 + ry) + ")");
updateNodeBounds(node, el);
node.intersect = function(point2) {
const pos = intersect.rect(node, point2);
const x = pos.x - node.x;
if (rx != 0 && (Math.abs(x) < node.width / 2 || Math.abs(x) == node.width / 2 && Math.abs(pos.y - node.y) > node.height / 2 - ry)) {
let y = ry * ry * (1 - x * x / (rx * rx));
if (y != 0) {
y = Math.sqrt(y);
}
y = ry - y;
if (point2.y - node.y > 0) {
y = -y;
}
pos.y += y;
}
return pos;
};
return shapeSvg;
};
const rect = async (parent, node) => {
const { shapeSvg, bbox, halfPadding } = await labelHelper(
parent,
node,
"node " + node.classes + " " + node.class,
true
);
const rect2 = shapeSvg.insert("rect", ":first-child");
const totalWidth = bbox.width + node.padding;
const totalHeight = bbox.height + node.padding;
rect2.attr("class", "basic label-container").attr("style", node.style).attr("rx", node.rx).attr("ry", node.ry).attr("x", -bbox.width / 2 - halfPadding).attr("y", -bbox.height / 2 - halfPadding).attr("width", totalWidth).attr("height", totalHeight);
if (node.props) {
const propKeys = new Set(Object.keys(node.props));
if (node.props.borders) {
applyNodePropertyBorders(rect2, node.props.borders, totalWidth, totalHeight);
propKeys.delete("borders");
}
propKeys.forEach((propKey) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn(`Unknown node property ${propKey}`);
});
}
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const labelRect = async (parent, node) => {
const { shapeSvg } = await labelHelper(parent, node, "label", true);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.trace("Classes = ", node.class);
const rect2 = shapeSvg.insert("rect", ":first-child");
const totalWidth = 0;
const totalHeight = 0;
rect2.attr("width", totalWidth).attr("height", totalHeight);
shapeSvg.attr("class", "label edgeLabel");
if (node.props) {
const propKeys = new Set(Object.keys(node.props));
if (node.props.borders) {
applyNodePropertyBorders(rect2, node.props.borders, totalWidth, totalHeight);
propKeys.delete("borders");
}
propKeys.forEach((propKey) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn(`Unknown node property ${propKey}`);
});
}
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
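/**
 * Selectively draw rect borders using stroke-dasharray: the perimeter is
 * walked top, right, bottom, left, and each side contributes either a dash
 * (visible) or a gap (skipped) of its own length. For example, borders "tl"
 * on a 100x40 rect yield "100 0 0 40 0 100 40 0".
 */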
function applyNodePropertyBorders(rect2, borders, totalWidth, totalHeight) {
const strokeDashArray = [];
const addBorder = (length) => {
strokeDashArray.push(length, 0);
};
const skipBorder = (length) => {
strokeDashArray.push(0, length);
};
if (borders.includes("t")) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.debug("add top border");
addBorder(totalWidth);
} else {
skipBorder(totalWidth);
}
if (borders.includes("r")) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.debug("add right border");
addBorder(totalHeight);
} else {
skipBorder(totalHeight);
}
if (borders.includes("b")) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.debug("add bottom border");
addBorder(totalWidth);
} else {
skipBorder(totalWidth);
}
if (borders.includes("l")) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.debug("add left border");
addBorder(totalHeight);
} else {
skipBorder(totalHeight);
}
rect2.attr("stroke-dasharray", strokeDashArray.join(" "));
}
const rectWithTitle = (parent, node) => {
let classes;
if (!node.classes) {
classes = "node default";
} else {
classes = "node " + node.classes;
}
const shapeSvg = parent.insert("g").attr("class", classes).attr("id", node.domId || node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const innerLine = shapeSvg.insert("line");
const label = shapeSvg.insert("g").attr("class", "label");
const text2 = node.labelText.flat ? node.labelText.flat() : node.labelText;
let title = "";
if (typeof text2 === "object") {
title = text2[0];
} else {
title = text2;
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Label text abc79", title, text2, typeof text2 === "object");
const text = label.node().appendChild(createLabel$1(title, node.labelStyle, true, true));
let bbox = { width: 0, height: 0 };
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = text.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(text);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Text 2", text2);
const textRows = text2.slice(1, text2.length);
let titleBox = text.getBBox();
const descr = label.node().appendChild(
    createLabel$1(textRows.join ? textRows.join("<br/>") : textRows, node.labelStyle, true, true)
);
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = descr.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(descr);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
const halfPadding = node.padding / 2;
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(descr).attr(
"transform",
"translate( " + // (titleBox.width - bbox.width) / 2 +
(bbox.width > titleBox.width ? 0 : (titleBox.width - bbox.width) / 2) + ", " + (titleBox.height + halfPadding + 5) + ")"
);
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(text).attr(
"transform",
"translate( " + // (titleBox.width - bbox.width) / 2 +
(bbox.width < titleBox.width ? 0 : -(titleBox.width - bbox.width) / 2) + ", 0)"
);
bbox = label.node().getBBox();
label.attr(
"transform",
"translate(" + -bbox.width / 2 + ", " + (-bbox.height / 2 - halfPadding + 3) + ")"
);
rect2.attr("class", "outer title-state").attr("x", -bbox.width / 2 - halfPadding).attr("y", -bbox.height / 2 - halfPadding).attr("width", bbox.width + node.padding).attr("height", bbox.height + node.padding);
innerLine.attr("class", "divider").attr("x1", -bbox.width / 2 - halfPadding).attr("x2", bbox.width / 2 + halfPadding).attr("y1", -bbox.height / 2 - halfPadding + titleBox.height + halfPadding).attr("y2", -bbox.height / 2 - halfPadding + titleBox.height + halfPadding);
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const stadium = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const h = bbox.height + node.padding;
const w = bbox.width + h / 4 + node.padding;
const rect2 = shapeSvg.insert("rect", ":first-child").attr("style", node.style).attr("rx", h / 2).attr("ry", h / 2).attr("x", -w / 2).attr("y", -h / 2).attr("width", w).attr("height", h);
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const circle = async (parent, node) => {
const { shapeSvg, bbox, halfPadding } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const circle2 = shapeSvg.insert("circle", ":first-child");
circle2.attr("style", node.style).attr("rx", node.rx).attr("ry", node.ry).attr("r", bbox.width / 2 + halfPadding).attr("width", bbox.width + node.padding).attr("height", bbox.height + node.padding);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Circle main");
updateNodeBounds(node, circle2);
node.intersect = function(point2) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Circle intersect", node, bbox.width / 2 + halfPadding, point2);
return intersect.circle(node, bbox.width / 2 + halfPadding, point2);
};
return shapeSvg;
};
const doublecircle = async (parent, node) => {
const { shapeSvg, bbox, halfPadding } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const gap = 5;
const circleGroup = shapeSvg.insert("g", ":first-child");
const outerCircle = circleGroup.insert("circle");
const innerCircle = circleGroup.insert("circle");
circleGroup.attr("class", node.class);
outerCircle.attr("style", node.style).attr("rx", node.rx).attr("ry", node.ry).attr("r", bbox.width / 2 + halfPadding + gap).attr("width", bbox.width + node.padding + gap * 2).attr("height", bbox.height + node.padding + gap * 2);
innerCircle.attr("style", node.style).attr("rx", node.rx).attr("ry", node.ry).attr("r", bbox.width / 2 + halfPadding).attr("width", bbox.width + node.padding).attr("height", bbox.height + node.padding);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("DoubleCircle main");
updateNodeBounds(node, outerCircle);
node.intersect = function(point2) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("DoubleCircle intersect", node, bbox.width / 2 + halfPadding + gap, point2);
return intersect.circle(node, bbox.width / 2 + halfPadding + gap, point2);
};
return shapeSvg;
};
const subroutine = async (parent, node) => {
const { shapeSvg, bbox } = await labelHelper(
parent,
node,
getClassesFromNode(node, void 0),
true
);
const w = bbox.width + node.padding;
const h = bbox.height + node.padding;
const points = [
{ x: 0, y: 0 },
{ x: w, y: 0 },
{ x: w, y: -h },
{ x: 0, y: -h },
{ x: 0, y: 0 },
{ x: -8, y: 0 },
{ x: w + 8, y: 0 },
{ x: w + 8, y: -h },
{ x: -8, y: -h },
{ x: -8, y: 0 }
];
const el = insertPolygonShape(shapeSvg, w, h, points);
el.attr("style", node.style);
updateNodeBounds(node, el);
node.intersect = function(point2) {
return intersect.polygon(node, points, point2);
};
return shapeSvg;
};
const start = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", "node default").attr("id", node.domId || node.id);
const circle2 = shapeSvg.insert("circle", ":first-child");
circle2.attr("class", "state-start").attr("r", 7).attr("width", 14).attr("height", 14);
updateNodeBounds(node, circle2);
node.intersect = function(point2) {
return intersect.circle(node, 7, point2);
};
return shapeSvg;
};
const forkJoin = (parent, node, dir) => {
const shapeSvg = parent.insert("g").attr("class", "node default").attr("id", node.domId || node.id);
let width = 70;
let height = 10;
if (dir === "LR") {
width = 10;
height = 70;
}
const shape = shapeSvg.append("rect").attr("x", -1 * width / 2).attr("y", -1 * height / 2).attr("width", width).attr("height", height).attr("class", "fork-join");
updateNodeBounds(node, shape);
node.height = node.height + node.padding / 2;
node.width = node.width + node.padding / 2;
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const end = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", "node default").attr("id", node.domId || node.id);
const innerCircle = shapeSvg.insert("circle", ":first-child");
const circle2 = shapeSvg.insert("circle", ":first-child");
circle2.attr("class", "state-start").attr("r", 7).attr("width", 14).attr("height", 14);
innerCircle.attr("class", "state-end").attr("r", 5).attr("width", 10).attr("height", 10);
updateNodeBounds(node, circle2);
node.intersect = function(point2) {
return intersect.circle(node, 7, point2);
};
return shapeSvg;
};
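/**
 * Class diagram node: stacks an optional «annotation» row, the class title,
 * a divider, the attribute rows, another divider and the method rows, while
 * tracking maxWidth/maxHeight to size the surrounding rect.
 */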
const class_box = (parent, node) => {
const halfPadding = node.padding / 2;
const rowPadding = 4;
const lineHeight = 8;
let classes;
if (!node.classes) {
classes = "node default";
} else {
classes = "node " + node.classes;
}
const shapeSvg = parent.insert("g").attr("class", classes).attr("id", node.domId || node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const topLine = shapeSvg.insert("line");
const bottomLine = shapeSvg.insert("line");
let maxWidth = 0;
let maxHeight = rowPadding;
const labelContainer = shapeSvg.insert("g").attr("class", "label");
let verticalPos = 0;
const hasInterface = node.classData.annotations && node.classData.annotations[0];
const interfaceLabelText = node.classData.annotations[0] ? "«" + node.classData.annotations[0] + "»" : "";
const interfaceLabel = labelContainer.node().appendChild(createLabel$1(interfaceLabelText, node.labelStyle, true, true));
let interfaceBBox = interfaceLabel.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = interfaceLabel.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(interfaceLabel);
interfaceBBox = div.getBoundingClientRect();
dv.attr("width", interfaceBBox.width);
dv.attr("height", interfaceBBox.height);
}
if (node.classData.annotations[0]) {
maxHeight += interfaceBBox.height + rowPadding;
maxWidth += interfaceBBox.width;
}
let classTitleString = node.classData.label;
if (node.classData.type !== void 0 && node.classData.type !== "") {
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels) {
classTitleString += "<" + node.classData.type + ">";
} else {
classTitleString += "<" + node.classData.type + ">";
}
}
const classTitleLabel = labelContainer.node().appendChild(createLabel$1(classTitleString, node.labelStyle, true, true));
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(classTitleLabel).attr("class", "classTitle");
let classTitleBBox = classTitleLabel.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = classTitleLabel.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(classTitleLabel);
classTitleBBox = div.getBoundingClientRect();
dv.attr("width", classTitleBBox.width);
dv.attr("height", classTitleBBox.height);
}
maxHeight += classTitleBBox.height + rowPadding;
if (classTitleBBox.width > maxWidth) {
maxWidth = classTitleBBox.width;
}
const classAttributes = [];
node.classData.members.forEach((member) => {
const parsedInfo = member.getDisplayDetails();
let parsedText = parsedInfo.displayText;
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels) {
      parsedText = parsedText.replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
const lbl = labelContainer.node().appendChild(
createLabel$1(
parsedText,
parsedInfo.cssStyle ? parsedInfo.cssStyle : node.labelStyle,
true,
true
)
);
let bbox = lbl.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = lbl.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(lbl);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
if (bbox.width > maxWidth) {
maxWidth = bbox.width;
}
maxHeight += bbox.height + rowPadding;
classAttributes.push(lbl);
});
maxHeight += lineHeight;
const classMethods = [];
node.classData.methods.forEach((member) => {
const parsedInfo = member.getDisplayDetails();
let displayText = parsedInfo.displayText;
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels) {
      displayText = displayText.replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
const lbl = labelContainer.node().appendChild(
createLabel$1(
displayText,
parsedInfo.cssStyle ? parsedInfo.cssStyle : node.labelStyle,
true,
true
)
);
let bbox = lbl.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels)) {
const div = lbl.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(lbl);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
if (bbox.width > maxWidth) {
maxWidth = bbox.width;
}
maxHeight += bbox.height + rowPadding;
classMethods.push(lbl);
});
maxHeight += lineHeight;
if (hasInterface) {
let diffX2 = (maxWidth - interfaceBBox.width) / 2;
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(interfaceLabel).attr(
"transform",
"translate( " + (-1 * maxWidth / 2 + diffX2) + ", " + -1 * maxHeight / 2 + ")"
);
verticalPos = interfaceBBox.height + rowPadding;
}
let diffX = (maxWidth - classTitleBBox.width) / 2;
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(classTitleLabel).attr(
"transform",
"translate( " + (-1 * maxWidth / 2 + diffX) + ", " + (-1 * maxHeight / 2 + verticalPos) + ")"
);
verticalPos += classTitleBBox.height + rowPadding;
topLine.attr("class", "divider").attr("x1", -maxWidth / 2 - halfPadding).attr("x2", maxWidth / 2 + halfPadding).attr("y1", -maxHeight / 2 - halfPadding + lineHeight + verticalPos).attr("y2", -maxHeight / 2 - halfPadding + lineHeight + verticalPos);
verticalPos += lineHeight;
classAttributes.forEach((lbl) => {
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(lbl).attr(
"transform",
"translate( " + -maxWidth / 2 + ", " + (-1 * maxHeight / 2 + verticalPos + lineHeight / 2) + ")"
);
const memberBBox = lbl == null ? void 0 : lbl.getBBox();
verticalPos += ((memberBBox == null ? void 0 : memberBBox.height) ?? 0) + rowPadding;
});
verticalPos += lineHeight;
bottomLine.attr("class", "divider").attr("x1", -maxWidth / 2 - halfPadding).attr("x2", maxWidth / 2 + halfPadding).attr("y1", -maxHeight / 2 - halfPadding + lineHeight + verticalPos).attr("y2", -maxHeight / 2 - halfPadding + lineHeight + verticalPos);
verticalPos += lineHeight;
classMethods.forEach((lbl) => {
(0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(lbl).attr(
"transform",
"translate( " + -maxWidth / 2 + ", " + (-1 * maxHeight / 2 + verticalPos) + ")"
);
const memberBBox = lbl == null ? void 0 : lbl.getBBox();
verticalPos += ((memberBBox == null ? void 0 : memberBBox.height) ?? 0) + rowPadding;
});
rect2.attr("class", "outer title-state").attr("x", -maxWidth / 2 - halfPadding).attr("y", -(maxHeight / 2) - halfPadding).attr("width", maxWidth + node.padding).attr("height", maxHeight + node.padding);
updateNodeBounds(node, rect2);
node.intersect = function(point2) {
return intersect.rect(node, point2);
};
return shapeSvg;
};
const shapes = {
rhombus: question,
question,
rect,
labelRect,
rectWithTitle,
choice,
circle,
doublecircle,
stadium,
hexagon,
rect_left_inv_arrow,
lean_right,
lean_left,
trapezoid,
inv_trapezoid,
rect_right_inv_arrow,
cylinder,
start,
end,
note: note$1,
subroutine,
fork: forkJoin,
join: forkJoin,
class_box
};
let nodeElems = {};
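/**
 * Render a node by dispatching on `node.shape` through the `shapes` registry.
 * When `node.link` is set the shape is wrapped in an <svg:a> ("_top" under
 * the sandbox securityLevel), and the created element is cached in
 * `nodeElems` for later positioning.
 */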
const insertNode = async (elem, node, dir) => {
let newEl;
let el;
if (node.link) {
let target;
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().securityLevel === "sandbox") {
target = "_top";
} else if (node.linkTarget) {
target = node.linkTarget || "_blank";
}
newEl = elem.insert("svg:a").attr("xlink:href", node.link).attr("target", target);
el = await shapes[node.shape](newEl, node, dir);
} else {
el = await shapes[node.shape](elem, node, dir);
newEl = el;
}
if (node.tooltip) {
el.attr("title", node.tooltip);
}
if (node.class) {
el.attr("class", "node default " + node.class);
}
nodeElems[node.id] = newEl;
if (node.haveCallback) {
nodeElems[node.id].attr("class", nodeElems[node.id].attr("class") + " clickable");
}
return newEl;
};
const setNodeElem = (elem, node) => {
nodeElems[node.id] = elem;
};
const clear$1 = () => {
nodeElems = {};
};
const positionNode = (node) => {
const el = nodeElems[node.id];
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.trace(
"Transforming node",
node.diff,
node,
"translate(" + (node.x - node.width / 2 - 5) + ", " + node.width / 2 + ")"
);
const padding = 8;
const diff = node.diff || 0;
if (node.clusterNode) {
el.attr(
"transform",
"translate(" + (node.x + diff - node.width / 2) + ", " + (node.y - node.height / 2 - padding) + ")"
);
} else {
el.attr("transform", "translate(" + node.x + ", " + node.y + ")");
}
return diff;
};
const markerOffsets = {
aggregation: 18,
extension: 18,
composition: 18,
dependency: 6,
lollipop: 13.5,
arrow_point: 5.3
};
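/**
 * Angle and deltas of the segment between two points (either {x, y} objects
 * or [x, y] arrays). For example:
 *
 *   calculateDeltaAndAngle({ x: 0, y: 0 }, { x: 3, y: 3 })
 *   // => { angle: Math.PI / 4, deltaX: 3, deltaY: 3 }
 */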
function calculateDeltaAndAngle(point1, point2) {
point1 = pointTransformer(point1);
point2 = pointTransformer(point2);
const [x1, y1] = [point1.x, point1.y];
const [x2, y2] = [point2.x, point2.y];
const deltaX = x2 - x1;
const deltaY = y2 - y1;
return { angle: Math.atan(deltaY / deltaX), deltaX, deltaY };
}
const pointTransformer = (data) => {
if (Array.isArray(data)) {
return { x: data[0], y: data[1] };
}
return data;
};
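/**
 * Produce x/y accessors for an edge path that nudge the first and last points
 * along the segment direction by the registered marker offset, so arrow
 * markers such as "extension" or "arrow_point" do not overlap the node they
 * attach to.
 */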
const getLineFunctionsWithOffset = (edge) => {
return {
x: function(d, i, data) {
let offset = 0;
if (i === 0 && Object.hasOwn(markerOffsets, edge.arrowTypeStart)) {
const { angle, deltaX } = calculateDeltaAndAngle(data[0], data[1]);
offset = markerOffsets[edge.arrowTypeStart] * Math.cos(angle) * (deltaX >= 0 ? 1 : -1);
} else if (i === data.length - 1 && Object.hasOwn(markerOffsets, edge.arrowTypeEnd)) {
const { angle, deltaX } = calculateDeltaAndAngle(
data[data.length - 1],
data[data.length - 2]
);
offset = markerOffsets[edge.arrowTypeEnd] * Math.cos(angle) * (deltaX >= 0 ? 1 : -1);
}
return pointTransformer(d).x + offset;
},
y: function(d, i, data) {
let offset = 0;
if (i === 0 && Object.hasOwn(markerOffsets, edge.arrowTypeStart)) {
const { angle, deltaY } = calculateDeltaAndAngle(data[0], data[1]);
offset = markerOffsets[edge.arrowTypeStart] * Math.abs(Math.sin(angle)) * (deltaY >= 0 ? 1 : -1);
} else if (i === data.length - 1 && Object.hasOwn(markerOffsets, edge.arrowTypeEnd)) {
const { angle, deltaY } = calculateDeltaAndAngle(
data[data.length - 1],
data[data.length - 2]
);
offset = markerOffsets[edge.arrowTypeEnd] * Math.abs(Math.sin(angle)) * (deltaY >= 0 ? 1 : -1);
}
return pointTransformer(d).y + offset;
}
};
};
let edgeLabels = {};
let terminalLabels = {};
const clear = () => {
edgeLabels = {};
terminalLabels = {};
};
const insertEdgeLabel = (elem, edge) => {
const useHtmlLabels = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels);
const labelElement = edge.labelType === "markdown" ? (0,_createText_aebacdfe_js__WEBPACK_IMPORTED_MODULE_2__.a)(elem, edge.label, {
style: edge.labelStyle,
useHtmlLabels,
addSvgBackground: true
}) : createLabel$1(edge.label, edge.labelStyle);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("abc82", edge, edge.labelType);
const edgeLabel = elem.insert("g").attr("class", "edgeLabel");
const label = edgeLabel.insert("g").attr("class", "label");
label.node().appendChild(labelElement);
let bbox = labelElement.getBBox();
if (useHtmlLabels) {
const div = labelElement.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(labelElement);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
label.attr("transform", "translate(" + -bbox.width / 2 + ", " + -bbox.height / 2 + ")");
edgeLabels[edge.id] = edgeLabel;
edge.width = bbox.width;
edge.height = bbox.height;
let fo;
if (edge.startLabelLeft) {
const startLabelElement = createLabel$1(edge.startLabelLeft, edge.labelStyle);
const startEdgeLabelLeft = elem.insert("g").attr("class", "edgeTerminals");
const inner = startEdgeLabelLeft.insert("g").attr("class", "inner");
fo = inner.node().appendChild(startLabelElement);
const slBox = startLabelElement.getBBox();
inner.attr("transform", "translate(" + -slBox.width / 2 + ", " + -slBox.height / 2 + ")");
if (!terminalLabels[edge.id]) {
terminalLabels[edge.id] = {};
}
terminalLabels[edge.id].startLeft = startEdgeLabelLeft;
setTerminalWidth(fo, edge.startLabelLeft);
}
if (edge.startLabelRight) {
const startLabelElement = createLabel$1(edge.startLabelRight, edge.labelStyle);
const startEdgeLabelRight = elem.insert("g").attr("class", "edgeTerminals");
const inner = startEdgeLabelRight.insert("g").attr("class", "inner");
fo = startEdgeLabelRight.node().appendChild(startLabelElement);
inner.node().appendChild(startLabelElement);
const slBox = startLabelElement.getBBox();
inner.attr("transform", "translate(" + -slBox.width / 2 + ", " + -slBox.height / 2 + ")");
if (!terminalLabels[edge.id]) {
terminalLabels[edge.id] = {};
}
terminalLabels[edge.id].startRight = startEdgeLabelRight;
setTerminalWidth(fo, edge.startLabelRight);
}
if (edge.endLabelLeft) {
const endLabelElement = createLabel$1(edge.endLabelLeft, edge.labelStyle);
const endEdgeLabelLeft = elem.insert("g").attr("class", "edgeTerminals");
const inner = endEdgeLabelLeft.insert("g").attr("class", "inner");
fo = inner.node().appendChild(endLabelElement);
const slBox = endLabelElement.getBBox();
inner.attr("transform", "translate(" + -slBox.width / 2 + ", " + -slBox.height / 2 + ")");
endEdgeLabelLeft.node().appendChild(endLabelElement);
if (!terminalLabels[edge.id]) {
terminalLabels[edge.id] = {};
}
terminalLabels[edge.id].endLeft = endEdgeLabelLeft;
setTerminalWidth(fo, edge.endLabelLeft);
}
if (edge.endLabelRight) {
const endLabelElement = createLabel$1(edge.endLabelRight, edge.labelStyle);
const endEdgeLabelRight = elem.insert("g").attr("class", "edgeTerminals");
const inner = endEdgeLabelRight.insert("g").attr("class", "inner");
fo = inner.node().appendChild(endLabelElement);
const slBox = endLabelElement.getBBox();
inner.attr("transform", "translate(" + -slBox.width / 2 + ", " + -slBox.height / 2 + ")");
endEdgeLabelRight.node().appendChild(endLabelElement);
if (!terminalLabels[edge.id]) {
terminalLabels[edge.id] = {};
}
terminalLabels[edge.id].endRight = endEdgeLabelRight;
setTerminalWidth(fo, edge.endLabelRight);
}
return labelElement;
};
function setTerminalWidth(fo, value) {
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.htmlLabels && fo) {
fo.style.width = value.length * 9 + "px";
fo.style.height = "12px";
}
}
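/**
 * Moves the cached edge label (and any terminal labels) to their final spot.
 * When the path was clipped against a cluster, `paths.updatedPath` is used
 * and the label is re-centered via calcLabelPosition /
 * calcTerminalLabelPosition.
 */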
const positionEdgeLabel = (edge, paths) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("Moving label abc78 ", edge.id, edge.label, edgeLabels[edge.id]);
let path = paths.updatedPath ? paths.updatedPath : paths.originalPath;
if (edge.label) {
const el = edgeLabels[edge.id];
let x = edge.x;
let y = edge.y;
if (path) {
const pos = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.calcLabelPosition(path);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info(
"Moving label " + edge.label + " from (",
x,
",",
y,
") to (",
pos.x,
",",
pos.y,
") abc78"
);
if (paths.updatedPath) {
x = pos.x;
y = pos.y;
}
}
el.attr("transform", "translate(" + x + ", " + y + ")");
}
if (edge.startLabelLeft) {
const el = terminalLabels[edge.id].startLeft;
let x = edge.x;
let y = edge.y;
if (path) {
const pos = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.calcTerminalLabelPosition(edge.arrowTypeStart ? 10 : 0, "start_left", path);
x = pos.x;
y = pos.y;
}
el.attr("transform", "translate(" + x + ", " + y + ")");
}
if (edge.startLabelRight) {
const el = terminalLabels[edge.id].startRight;
let x = edge.x;
let y = edge.y;
if (path) {
const pos = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.calcTerminalLabelPosition(
edge.arrowTypeStart ? 10 : 0,
"start_right",
path
);
x = pos.x;
y = pos.y;
}
el.attr("transform", "translate(" + x + ", " + y + ")");
}
if (edge.endLabelLeft) {
const el = terminalLabels[edge.id].endLeft;
let x = edge.x;
let y = edge.y;
if (path) {
const pos = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.calcTerminalLabelPosition(edge.arrowTypeEnd ? 10 : 0, "end_left", path);
x = pos.x;
y = pos.y;
}
el.attr("transform", "translate(" + x + ", " + y + ")");
}
if (edge.endLabelRight) {
const el = terminalLabels[edge.id].endRight;
let x = edge.x;
let y = edge.y;
if (path) {
const pos = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.calcTerminalLabelPosition(edge.arrowTypeEnd ? 10 : 0, "end_right", path);
x = pos.x;
y = pos.y;
}
el.attr("transform", "translate(" + x + ", " + y + ")");
}
};
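/**
 * True when `point2` lies on or outside the bounding box of `node`
 * (node.x / node.y is the center, width / height the full extent).
 */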
const outsideNode = (node, point2) => {
const x = node.x;
const y = node.y;
const dx = Math.abs(point2.x - x);
const dy = Math.abs(point2.y - y);
const w = node.width / 2;
const h = node.height / 2;
if (dx >= w || dy >= h) {
return true;
}
return false;
};
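/**
 * Finds where the segment insidePoint -> outsidePoint crosses the border of
 * `node`'s bounding box. Uses similar triangles on the segment's horizontal
 * (R) and vertical (Q) extents: the top/bottom branch is taken when the
 * segment is steeper than the box diagonal, otherwise a side is hit.
 */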
const intersection = (node, outsidePoint, insidePoint) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn(`intersection calc abc89:
outsidePoint: ${JSON.stringify(outsidePoint)}
insidePoint : ${JSON.stringify(insidePoint)}
node : x:${node.x} y:${node.y} w:${node.width} h:${node.height}`);
const x = node.x;
const y = node.y;
const dx = Math.abs(x - insidePoint.x);
const w = node.width / 2;
let r = insidePoint.x < outsidePoint.x ? w - dx : w + dx;
const h = node.height / 2;
const Q = Math.abs(outsidePoint.y - insidePoint.y);
const R = Math.abs(outsidePoint.x - insidePoint.x);
if (Math.abs(y - outsidePoint.y) * w > Math.abs(x - outsidePoint.x) * h) {
let q = insidePoint.y < outsidePoint.y ? outsidePoint.y - h - y : y - h - outsidePoint.y;
r = R * q / Q;
const res = {
x: insidePoint.x < outsidePoint.x ? insidePoint.x + r : insidePoint.x - R + r,
y: insidePoint.y < outsidePoint.y ? insidePoint.y + Q - q : insidePoint.y - Q + q
};
if (r === 0) {
res.x = outsidePoint.x;
res.y = outsidePoint.y;
}
if (R === 0) {
res.x = outsidePoint.x;
}
if (Q === 0) {
res.y = outsidePoint.y;
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn(`abc89 topp/bott calc, Q ${Q}, q ${q}, R ${R}, r ${r}`, res);
return res;
} else {
if (insidePoint.x < outsidePoint.x) {
r = outsidePoint.x - w - x;
} else {
r = x - w - outsidePoint.x;
}
let q = Q * r / R;
let _x = insidePoint.x < outsidePoint.x ? insidePoint.x + R - r : insidePoint.x - R + r;
let _y = insidePoint.y < outsidePoint.y ? insidePoint.y + q : insidePoint.y - q;
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn(`sides calc abc89, Q ${Q}, q ${q}, R ${R}, r ${r}`, { _x, _y });
if (r === 0) {
_x = outsidePoint.x;
_y = outsidePoint.y;
}
if (R === 0) {
_x = outsidePoint.x;
}
if (Q === 0) {
_y = outsidePoint.y;
}
return { x: _x, y: _y };
}
};
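/**
 * Clips an edge's point list against a cluster: points are copied until the
 * path first enters the cluster's bounding box, at which point the border
 * intersection is appended and the remaining (inside) points are dropped.
 */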
const cutPathAtIntersect = (_points, boundaryNode) => {
  _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 cutPathAtIntersect", _points, boundaryNode);
  let points = [];
  let lastPointOutside = _points[0];
  let isInside = false;
  _points.forEach((point2) => {
    _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("abc88 checking point", point2, boundaryNode);
    if (!outsideNode(boundaryNode, point2) && !isInside) {
      const inter = intersection(boundaryNode, lastPointOutside, point2);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 inside", point2, lastPointOutside, inter);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 intersection", inter);
if (!points.some((e) => e.x === inter.x && e.y === inter.y)) {
points.push(inter);
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 no intersect", inter, points);
}
isInside = true;
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 outside", point2, lastPointOutside);
lastPointOutside = point2;
if (!isInside) {
points.push(point2);
}
}
});
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.warn("abc88 returning points", points);
return points;
};
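/**
 * Draws the actual edge path: trims the points against the source/target
 * node intersections and any from/to clusters, renders the <path> with d3's
 * line generator (using the marker-aware accessors above), applies
 * thickness/pattern classes and start/end markers, and returns the original
 * plus (if clipped) updated point lists for label positioning.
 */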
const insertEdge = function(elem, e, edge, clusterDb, diagramType, graph, id) {
let points = edge.points;
let pointsHasChanged = false;
const tail = graph.node(e.v);
  const head = graph.node(e.w);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("abc88 InsertEdge: ", edge);
if (head.intersect && tail.intersect) {
points = points.slice(1, edge.points.length - 1);
points.unshift(tail.intersect(points[0]));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info(
"Last point",
points[points.length - 1],
head,
head.intersect(points[points.length - 1])
);
points.push(head.intersect(points[points.length - 1]));
}
if (edge.toCluster) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("to cluster abc88", clusterDb[edge.toCluster]);
points = cutPathAtIntersect(edge.points, clusterDb[edge.toCluster].node);
pointsHasChanged = true;
}
if (edge.fromCluster) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("from cluster abc88", clusterDb[edge.fromCluster]);
points = cutPathAtIntersect(points.reverse(), clusterDb[edge.fromCluster].node).reverse();
pointsHasChanged = true;
}
const lineData = points.filter((p) => !Number.isNaN(p.y));
let curve = d3__WEBPACK_IMPORTED_MODULE_0__/* .curveBasis */ .$0Z;
if (edge.curve && (diagramType === "graph" || diagramType === "flowchart")) {
curve = edge.curve;
}
const { x, y } = getLineFunctionsWithOffset(edge);
const lineFunction = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .line */ .jvg)().x(x).y(y).curve(curve);
let strokeClasses;
switch (edge.thickness) {
case "normal":
strokeClasses = "edge-thickness-normal";
break;
case "thick":
strokeClasses = "edge-thickness-thick";
break;
case "invisible":
strokeClasses = "edge-thickness-thick";
break;
default:
strokeClasses = "";
}
switch (edge.pattern) {
case "solid":
strokeClasses += " edge-pattern-solid";
break;
case "dotted":
strokeClasses += " edge-pattern-dotted";
break;
case "dashed":
strokeClasses += " edge-pattern-dashed";
break;
}
const svgPath = elem.append("path").attr("d", lineFunction(lineData)).attr("id", edge.id).attr("class", " " + strokeClasses + (edge.classes ? " " + edge.classes : "")).attr("style", edge.style);
let url = "";
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().flowchart.arrowMarkerAbsolute || (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().state.arrowMarkerAbsolute) {
url = window.location.protocol + "//" + window.location.host + window.location.pathname + window.location.search;
url = url.replace(/\(/g, "\\(");
url = url.replace(/\)/g, "\\)");
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("arrowTypeStart", edge.arrowTypeStart);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.info("arrowTypeEnd", edge.arrowTypeEnd);
switch (edge.arrowTypeStart) {
case "arrow_cross":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-crossStart)"
);
break;
case "arrow_point":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-pointStart)"
);
break;
case "arrow_barb":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-barbStart)"
);
break;
case "arrow_circle":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-circleStart)"
);
break;
case "aggregation":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-aggregationStart)"
);
break;
case "extension":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-extensionStart)"
);
break;
case "composition":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-compositionStart)"
);
break;
case "dependency":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-dependencyStart)"
);
break;
case "lollipop":
svgPath.attr(
"marker-start",
"url(" + url + "#" + id + "_" + diagramType + "-lollipopStart)"
);
break;
}
switch (edge.arrowTypeEnd) {
case "arrow_cross":
svgPath.attr("marker-end", "url(" + url + "#" + id + "_" + diagramType + "-crossEnd)");
break;
case "arrow_point":
svgPath.attr("marker-end", "url(" + url + "#" + id + "_" + diagramType + "-pointEnd)");
break;
case "arrow_barb":
svgPath.attr("marker-end", "url(" + url + "#" + id + "_" + diagramType + "-barbEnd)");
break;
case "arrow_circle":
svgPath.attr("marker-end", "url(" + url + "#" + id + "_" + diagramType + "-circleEnd)");
break;
case "aggregation":
svgPath.attr(
"marker-end",
"url(" + url + "#" + id + "_" + diagramType + "-aggregationEnd)"
);
break;
case "extension":
svgPath.attr(
"marker-end",
"url(" + url + "#" + id + "_" + diagramType + "-extensionEnd)"
);
break;
case "composition":
svgPath.attr(
"marker-end",
"url(" + url + "#" + id + "_" + diagramType + "-compositionEnd)"
);
break;
case "dependency":
svgPath.attr(
"marker-end",
"url(" + url + "#" + id + "_" + diagramType + "-dependencyEnd)"
);
break;
case "lollipop":
svgPath.attr(
"marker-end",
"url(" + url + "#" + id + "_" + diagramType + "-lollipopEnd)"
);
break;
}
let paths = {};
if (pointsHasChanged) {
paths.updatedPath = points;
}
paths.originalPath = edge.points;
return paths;
};
/***/ }),
/***/ 87936:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ r: () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var dagre_d3_es_src_dagre_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(41644);
/* harmony import */ var dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(39354);
/* harmony import */ var _edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(25269);
/* harmony import */ var _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(85322);
/* harmony import */ var dagre_d3_es_src_graphlib_index_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(45625);
/* harmony import */ var _createText_aebacdfe_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(54511);
/* harmony import */ var d3__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(64218);
let clusterDb = {};
let descendants = {};
let parents = {};
const clear$1 = () => {
descendants = {};
parents = {};
clusterDb = {};
};
const isDescendant = (id, ancestorId) => {
  _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("In isDescendant", ancestorId, " ", id, " = ", descendants[ancestorId].includes(id));
  if (descendants[ancestorId].includes(id)) {
return true;
}
return false;
};
const edgeInCluster = (edge, clusterId) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Decendants of ", clusterId, " is ", descendants[clusterId]);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge is ", edge);
if (edge.v === clusterId) {
return false;
}
if (edge.w === clusterId) {
return false;
}
if (!descendants[clusterId]) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Tilt, ", clusterId, ",not in decendants");
return false;
}
return descendants[clusterId].includes(edge.v) || isDescendant(edge.v, clusterId) || isDescendant(edge.w, clusterId) || descendants[clusterId].includes(edge.w);
};
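/**
 * Recursively copies the contents of cluster `clusterId` from `graph` into
 * `newGraph`, preserving parent relations and any edges that stay inside the
 * cluster (see edgeInCluster), then removes the copied nodes from the
 * original graph.
 */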
const copy = (clusterId, graph, newGraph, rootId) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(
"Copying children of ",
clusterId,
"root",
rootId,
"data",
graph.node(clusterId),
rootId
);
const nodes = graph.children(clusterId) || [];
if (clusterId !== rootId) {
nodes.push(clusterId);
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Copying (nodes) clusterId", clusterId, "nodes", nodes);
nodes.forEach((node) => {
if (graph.children(node).length > 0) {
copy(node, graph, newGraph, rootId);
} else {
const data = graph.node(node);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("cp ", node, " to ", rootId, " with parent ", clusterId);
newGraph.setNode(node, data);
if (rootId !== graph.parent(node)) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Setting parent", node, graph.parent(node));
newGraph.setParent(node, graph.parent(node));
}
if (clusterId !== rootId && node !== clusterId) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Setting parent", node, clusterId);
newGraph.setParent(node, clusterId);
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("In copy ", clusterId, "root", rootId, "data", graph.node(clusterId), rootId);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug(
"Not Setting parent for node=",
node,
"cluster!==rootId",
clusterId !== rootId,
"node!==clusterId",
node !== clusterId
);
}
const edges = graph.edges(node);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Copying Edges", edges);
edges.forEach((edge) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge", edge);
const data2 = graph.edge(edge.v, edge.w, edge.name);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge data", data2, rootId);
try {
if (edgeInCluster(edge, rootId)) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Copying as ", edge.v, edge.w, data2, edge.name);
newGraph.setEdge(edge.v, edge.w, data2, edge.name);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("newGraph edges ", newGraph.edges(), newGraph.edge(newGraph.edges()[0]));
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info(
"Skipping copy of edge ",
edge.v,
"-->",
edge.w,
" rootId: ",
rootId,
" clusterId:",
clusterId
);
}
} catch (e) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.error(e);
}
});
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Removing node", node);
graph.removeNode(node);
});
};
const extractDescendants = (id, graph) => {
const children = graph.children(id);
let res = [...children];
for (const child of children) {
parents[child] = id;
res = [...res, ...extractDescendants(child, graph)];
}
return res;
};
const findNonClusterChild = (id, graph) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Searching", id);
const children = graph.children(id);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Searching children of id ", id, children);
if (children.length < 1) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("This is a valid node", id);
return id;
}
for (const child of children) {
const _id = findNonClusterChild(child, graph);
if (_id) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Found replacement for", id, " => ", _id);
return _id;
}
}
};
const getAnchorId = (id) => {
if (!clusterDb[id]) {
return id;
}
if (!clusterDb[id].externalConnections) {
return id;
}
if (clusterDb[id]) {
return clusterDb[id].id;
}
return id;
};
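/**
 * Pre-layout normalisation: records the descendants of every cluster, flags
 * clusters whose edges cross their border (externalConnections), and rewires
 * edges that start or end on a cluster so they attach to a representative
 * child instead (getAnchorId). A link from a cluster to itself is split in
 * two around a synthetic labelRect node so its label can still be rendered.
 */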
const adjustClustersAndEdges = (graph, depth) => {
if (!graph || depth > 10) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Opting out, no graph ");
return;
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Opting in, graph ");
}
graph.nodes().forEach(function(id) {
const children = graph.children(id);
if (children.length > 0) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(
"Cluster identified",
id,
" Replacement id in edges: ",
findNonClusterChild(id, graph)
);
descendants[id] = extractDescendants(id, graph);
clusterDb[id] = { id: findNonClusterChild(id, graph), clusterData: graph.node(id) };
}
});
graph.nodes().forEach(function(id) {
const children = graph.children(id);
const edges = graph.edges();
if (children.length > 0) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Cluster identified", id, descendants);
edges.forEach((edge) => {
if (edge.v !== id && edge.w !== id) {
const d1 = isDescendant(edge.v, id);
const d2 = isDescendant(edge.w, id);
if (d1 ^ d2) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Edge: ", edge, " leaves cluster ", id);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Decendants of XXX ", id, ": ", descendants[id]);
clusterDb[id].externalConnections = true;
}
}
});
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Not a cluster ", id, descendants);
}
});
graph.edges().forEach(function(e) {
const edge = graph.edge(e);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Edge " + e.v + " -> " + e.w + ": " + JSON.stringify(e));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Edge " + e.v + " -> " + e.w + ": " + JSON.stringify(graph.edge(e)));
let v = e.v;
let w = e.w;
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(
"Fix XXX",
clusterDb,
"ids:",
e.v,
e.w,
"Translating: ",
clusterDb[e.v],
" --- ",
clusterDb[e.w]
);
if (clusterDb[e.v] && clusterDb[e.w] && clusterDb[e.v] === clusterDb[e.w]) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Fixing and trixing link to self - removing XXX", e.v, e.w, e.name);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Fixing and trixing - removing XXX", e.v, e.w, e.name);
v = getAnchorId(e.v);
w = getAnchorId(e.w);
graph.removeEdge(e.v, e.w, e.name);
const specialId = e.w + "---" + e.v;
graph.setNode(specialId, {
domId: specialId,
id: specialId,
labelStyle: "",
labelText: edge.label,
padding: 0,
shape: "labelRect",
style: ""
});
const edge1 = structuredClone(edge);
const edge2 = structuredClone(edge);
edge1.label = "";
edge1.arrowTypeEnd = "none";
edge2.label = "";
edge1.fromCluster = e.v;
edge2.toCluster = e.v;
graph.setEdge(v, specialId, edge1, e.name + "-cyclic-special");
graph.setEdge(specialId, w, edge2, e.name + "-cyclic-special");
} else if (clusterDb[e.v] || clusterDb[e.w]) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Fixing and trixing - removing XXX", e.v, e.w, e.name);
v = getAnchorId(e.v);
w = getAnchorId(e.w);
graph.removeEdge(e.v, e.w, e.name);
if (v !== e.v) {
edge.fromCluster = e.v;
}
if (w !== e.w) {
edge.toCluster = e.w;
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Fix Replacing with XXX", v, w, e.name);
graph.setEdge(v, w, edge, e.name);
}
});
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Adjusted Graph", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph));
extractor(graph, 0);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace(clusterDb);
};
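/**
 * Recursively lifts every cluster without external connections into its own
 * dagre Graph (stored on the node as clusterNode: true with `graph` set), so
 * recursiveRender can lay the cluster out independently of its parent graph.
 */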
const extractor = (graph, depth) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("extractor - ", depth, dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph), graph.children("D"));
if (depth > 10) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.error("Bailing out");
return;
}
let nodes = graph.nodes();
let hasChildren = false;
for (const node of nodes) {
const children = graph.children(node);
hasChildren = hasChildren || children.length > 0;
}
if (!hasChildren) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Done, no node has children", graph.nodes());
return;
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Nodes = ", nodes, depth);
for (const node of nodes) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug(
"Extracting node",
node,
clusterDb,
clusterDb[node] && !clusterDb[node].externalConnections,
!graph.parent(node),
graph.node(node),
graph.children("D"),
" Depth ",
depth
);
if (!clusterDb[node]) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Not a cluster", node, depth);
} else if (!clusterDb[node].externalConnections && // !graph.parent(node) &&
graph.children(node) && graph.children(node).length > 0) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(
"Cluster without external connections, without a parent and with children",
node,
depth
);
const graphSettings = graph.graph();
let dir = graphSettings.rankdir === "TB" ? "LR" : "TB";
if (clusterDb[node] && clusterDb[node].clusterData && clusterDb[node].clusterData.dir) {
dir = clusterDb[node].clusterData.dir;
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Fixing dir", clusterDb[node].clusterData.dir, dir);
}
const clusterGraph = new dagre_d3_es_src_graphlib_index_js__WEBPACK_IMPORTED_MODULE_2__/* .Graph */ .k({
multigraph: true,
compound: true
}).setGraph({
rankdir: dir,
// Todo: set proper spacing
nodesep: 50,
ranksep: 50,
marginx: 8,
marginy: 8
}).setDefaultEdgeLabel(function() {
return {};
});
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Old graph before copy", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph));
copy(node, graph, clusterGraph, node);
graph.setNode(node, {
clusterNode: true,
id: node,
clusterData: clusterDb[node].clusterData,
labelText: clusterDb[node].labelText,
graph: clusterGraph
});
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("New graph after copy node: (", node, ")", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(clusterGraph));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug("Old graph after copy", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph));
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(
"Cluster ** ",
node,
" **not meeting the criteria !externalConnections:",
!clusterDb[node].externalConnections,
" no parent: ",
!graph.parent(node),
" children ",
graph.children(node) && graph.children(node).length > 0,
graph.children("D"),
depth
);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.debug(clusterDb);
}
}
nodes = graph.nodes();
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("New list of nodes", nodes);
for (const node of nodes) {
const data = graph.node(node);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn(" Now next level", node, data);
if (data.clusterNode) {
extractor(data.graph, depth + 1);
}
}
};
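/**
 * Depth-first flattening of the cluster hierarchy: every parent appears
 * before its children, which is relied upon below when clusters are inserted
 * before the nodes they contain are positioned.
 */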
const sorter = (graph, nodes) => {
if (nodes.length === 0) {
return [];
}
  let result = [...nodes];
nodes.forEach((node) => {
const children = graph.children(node);
const sorted = sorter(graph, children);
result = [...result, ...sorted];
});
return result;
};
const sortNodesByHierarchy = (graph) => sorter(graph, graph.children());
const rect = (parent, node) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Creating subgraph rect for ", node.id, node);
const shapeSvg = parent.insert("g").attr("class", "cluster" + (node.class ? " " + node.class : "")).attr("id", node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const useHtmlLabels = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.c)().flowchart.htmlLabels);
const label = shapeSvg.insert("g").attr("class", "cluster-label");
const text = node.labelType === "markdown" ? (0,_createText_aebacdfe_js__WEBPACK_IMPORTED_MODULE_5__.a)(label, node.labelText, { style: node.labelStyle, useHtmlLabels }) : label.node().appendChild((0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.c)(node.labelText, node.labelStyle, void 0, true));
let bbox = text.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.c)().flowchart.htmlLabels)) {
const div = text.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_3__/* .select */ .Ys)(text);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
const padding = 0 * node.padding;
const halfPadding = padding / 2;
const width = node.width <= bbox.width + padding ? bbox.width + padding : node.width;
if (node.width <= bbox.width + padding) {
node.diff = (bbox.width - node.width) / 2 - node.padding / 2;
} else {
node.diff = -node.padding / 2;
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Data ", node, JSON.stringify(node));
rect2.attr("style", node.style).attr("rx", node.rx).attr("ry", node.ry).attr("x", node.x - width / 2).attr("y", node.y - node.height / 2 - halfPadding).attr("width", width).attr("height", node.height + padding);
if (useHtmlLabels) {
label.attr(
"transform",
      // This puts the label on top of the box instead of inside it
"translate(" + (node.x - bbox.width / 2) + ", " + (node.y - node.height / 2) + ")"
);
} else {
label.attr(
"transform",
      // This puts the label on top of the box instead of inside it
"translate(" + node.x + ", " + (node.y - node.height / 2) + ")"
);
}
const rectBox = rect2.node().getBBox();
node.width = rectBox.width;
node.height = rectBox.height;
node.intersect = function(point) {
return (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.i)(node, point);
};
return shapeSvg;
};
const noteGroup = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", "note-cluster").attr("id", node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const padding = 0 * node.padding;
const halfPadding = padding / 2;
rect2.attr("rx", node.rx).attr("ry", node.ry).attr("x", node.x - node.width / 2 - halfPadding).attr("y", node.y - node.height / 2 - halfPadding).attr("width", node.width + padding).attr("height", node.height + padding).attr("fill", "none");
const rectBox = rect2.node().getBBox();
node.width = rectBox.width;
node.height = rectBox.height;
node.intersect = function(point) {
return (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.i)(node, point);
};
return shapeSvg;
};
const roundedWithTitle = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", node.classes).attr("id", node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const label = shapeSvg.insert("g").attr("class", "cluster-label");
const innerRect = shapeSvg.append("rect");
const text = label.node().appendChild((0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.c)(node.labelText, node.labelStyle, void 0, true));
let bbox = text.getBBox();
if ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.c)().flowchart.htmlLabels)) {
const div = text.children[0];
const dv = (0,d3__WEBPACK_IMPORTED_MODULE_3__/* .select */ .Ys)(text);
bbox = div.getBoundingClientRect();
dv.attr("width", bbox.width);
dv.attr("height", bbox.height);
}
bbox = text.getBBox();
const padding = 0 * node.padding;
const halfPadding = padding / 2;
const width = node.width <= bbox.width + node.padding ? bbox.width + node.padding : node.width;
if (node.width <= bbox.width + node.padding) {
node.diff = (bbox.width + node.padding * 0 - node.width) / 2;
} else {
node.diff = -node.padding / 2;
}
rect2.attr("class", "outer").attr("x", node.x - width / 2 - halfPadding).attr("y", node.y - node.height / 2 - halfPadding).attr("width", width + padding).attr("height", node.height + padding);
innerRect.attr("class", "inner").attr("x", node.x - width / 2 - halfPadding).attr("y", node.y - node.height / 2 - halfPadding + bbox.height - 1).attr("width", width + padding).attr("height", node.height + padding - bbox.height - 3);
label.attr(
"transform",
"translate(" + (node.x - bbox.width / 2) + ", " + (node.y - node.height / 2 - node.padding / 3 + ((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.m)((0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.c)().flowchart.htmlLabels) ? 5 : 3)) + ")"
);
const rectBox = rect2.node().getBBox();
node.height = rectBox.height;
node.intersect = function(point) {
return (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.i)(node, point);
};
return shapeSvg;
};
const divider = (parent, node) => {
const shapeSvg = parent.insert("g").attr("class", node.classes).attr("id", node.id);
const rect2 = shapeSvg.insert("rect", ":first-child");
const padding = 0 * node.padding;
const halfPadding = padding / 2;
rect2.attr("class", "divider").attr("x", node.x - node.width / 2 - halfPadding).attr("y", node.y - node.height / 2).attr("width", node.width + padding).attr("height", node.height + padding);
const rectBox = rect2.node().getBBox();
node.width = rectBox.width;
node.height = rectBox.height;
node.diff = -node.padding / 2;
node.intersect = function(point) {
return (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.i)(node, point);
};
return shapeSvg;
};
const shapes = { rect, roundedWithTitle, noteGroup, divider };
let clusterElems = {};
const insertCluster = (elem, node) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Inserting cluster");
const shape = node.shape || "rect";
clusterElems[node.id] = shapes[shape](elem, node);
};
const clear = () => {
clusterElems = {};
};
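/**
 * Renders one (sub-)graph: inserts nodes (recursing into clusterNode
 * sub-graphs), inserts edge labels, runs dagre layout, then positions
 * clusters, nodes and edge paths. Returns the created root group and the
 * width correction (diff) reported by any group node.
 */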
const recursiveRender = async (_elem, graph, diagramtype, id, parentCluster) => {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Graph in recursive render: XXX", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph), parentCluster);
const dir = graph.graph().rankdir;
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Dir in recursive render - dir:", dir);
const elem = _elem.insert("g").attr("class", "root");
if (!graph.nodes()) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("No nodes found for", graph);
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Recursive render XXX", graph.nodes());
}
if (graph.edges().length > 0) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Recursive edges", graph.edge(graph.edges()[0]));
}
const clusters = elem.insert("g").attr("class", "clusters");
const edgePaths = elem.insert("g").attr("class", "edgePaths");
const edgeLabels = elem.insert("g").attr("class", "edgeLabels");
const nodes = elem.insert("g").attr("class", "nodes");
await Promise.all(
graph.nodes().map(async function(v) {
const node = graph.node(v);
if (parentCluster !== void 0) {
const data = JSON.parse(JSON.stringify(parentCluster.clusterData));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Setting data for cluster XXX (", v, ") ", data, parentCluster);
graph.setNode(parentCluster.id, data);
if (!graph.parent(v)) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.trace("Setting parent", v, parentCluster.id);
graph.setParent(v, parentCluster.id, data);
}
}
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("(Insert) Node XXX" + v + ": " + JSON.stringify(graph.node(v)));
if (node && node.clusterNode) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Cluster identified", v, node.width, graph.node(v));
const o = await recursiveRender(nodes, node.graph, diagramtype, id, graph.node(v));
const newEl = o.elem;
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.u)(node, newEl);
node.diff = o.diff || 0;
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Node bounds (abc123)", v, node, node.width, node.x, node.y);
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.s)(newEl, node);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Recursive render complete ", newEl, node);
} else {
if (graph.children(v).length > 0) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Cluster - the non recursive path XXX", v, node.id, node, graph);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info(findNonClusterChild(node.id, graph));
clusterDb[node.id] = { id: findNonClusterChild(node.id, graph), node };
} else {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Node - the non recursive path", v, node.id, node);
await (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.e)(nodes, graph.node(v), dir);
}
}
})
);
graph.edges().forEach(function(e) {
const edge = graph.edge(e.v, e.w, e.name);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge " + e.v + " -> " + e.w + ": " + JSON.stringify(e));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge " + e.v + " -> " + e.w + ": ", e, " ", JSON.stringify(graph.edge(e)));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Fix", clusterDb, "ids:", e.v, e.w, "Translateing: ", clusterDb[e.v], clusterDb[e.w]);
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.f)(edgeLabels, edge);
});
graph.edges().forEach(function(e) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge " + e.v + " -> " + e.w + ": " + JSON.stringify(e));
});
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("#############################################");
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("### Layout ###");
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("#############################################");
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info(graph);
(0,dagre_d3_es_src_dagre_index_js__WEBPACK_IMPORTED_MODULE_0__/* .layout */ .bK)(graph);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Graph after layout:", dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph));
let diff = 0;
sortNodesByHierarchy(graph).forEach(function(v) {
const node = graph.node(v);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Position " + v + ": " + JSON.stringify(graph.node(v)));
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info(
"Position " + v + ": (" + node.x,
"," + node.y,
") width: ",
node.width,
" height: ",
node.height
);
if (node && node.clusterNode) {
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.p)(node);
} else {
if (graph.children(v).length > 0) {
insertCluster(clusters, node);
clusterDb[node.id].node = node;
} else {
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.p)(node);
}
}
});
graph.edges().forEach(function(e) {
const edge = graph.edge(e);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info("Edge " + e.v + " -> " + e.w + ": " + JSON.stringify(edge), edge);
const paths = (0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.g)(edgePaths, e, edge, clusterDb, diagramtype, graph, id);
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.h)(edge, paths);
});
graph.nodes().forEach(function(v) {
const n = graph.node(v);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.info(v, n.type, n.diff);
if (n.type === "group") {
diff = n.diff;
}
});
return { elem, diff };
};
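/*
 * Exported entry point. A minimal, illustrative call (the selection and
 * marker names below are assumptions, not part of this module):
 *
 *   // const svg = d3.select('#container svg');
 *   // await render(svg, graph, ['point', 'circle', 'cross'], 'flowchart', id);
 *
 * It installs the markers, clears the per-render caches, normalises clusters
 * and edges, and then starts the recursive render.
 */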
const render = async (elem, graph, markers, diagramtype, id) => {
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.a)(elem, markers, diagramtype, id);
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.b)();
(0,_edges_80f1ebb6_js__WEBPACK_IMPORTED_MODULE_6__.d)();
clear();
clear$1();
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Graph at first:", JSON.stringify(dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph)));
adjustClustersAndEdges(graph);
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_4__.l.warn("Graph after:", JSON.stringify(dagre_d3_es_src_graphlib_json_js__WEBPACK_IMPORTED_MODULE_1__/* .write */ .c(graph)));
await recursiveRender(elem, graph, diagramtype, id);
};
/***/ }),
/***/ 54706:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ d: () => (/* binding */ db),
/* harmony export */ p: () => (/* binding */ parser$1),
/* harmony export */ s: () => (/* binding */ styles)
/* harmony export */ });
/* harmony import */ var d3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(64218);
/* harmony import */ var _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(85322);
var parser = function() {
var o = function(k, v, o2, l) {
for (o2 = o2 || {}, l = k.length; l--; o2[k[l]] = v)
;
return o2;
}, $V0 = [1, 16], $V1 = [1, 17], $V2 = [1, 18], $V3 = [1, 37], $V4 = [1, 38], $V5 = [1, 24], $V6 = [1, 22], $V7 = [1, 23], $V8 = [1, 29], $V9 = [1, 30], $Va = [1, 31], $Vb = [1, 32], $Vc = [1, 33], $Vd = [1, 34], $Ve = [1, 25], $Vf = [1, 26], $Vg = [1, 27], $Vh = [1, 28], $Vi = [1, 42], $Vj = [1, 39], $Vk = [1, 40], $Vl = [1, 41], $Vm = [1, 43], $Vn = [1, 9], $Vo = [1, 8, 9], $Vp = [1, 54], $Vq = [1, 55], $Vr = [1, 56], $Vs = [1, 57], $Vt = [1, 58], $Vu = [1, 59], $Vv = [1, 60], $Vw = [1, 8, 9, 38], $Vx = [1, 71], $Vy = [1, 8, 9, 12, 13, 21, 36, 38, 41, 58, 59, 60, 61, 62, 63, 64, 69, 71], $Vz = [1, 8, 9, 12, 13, 19, 21, 36, 38, 41, 45, 58, 59, 60, 61, 62, 63, 64, 69, 71, 84, 86, 87, 88, 89], $VA = [13, 84, 86, 87, 88, 89], $VB = [13, 63, 64, 84, 86, 87, 88, 89], $VC = [13, 58, 59, 60, 61, 62, 84, 86, 87, 88, 89], $VD = [1, 90], $VE = [1, 8, 9, 36, 38, 41], $VF = [1, 8, 9, 21];
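/*
 * Below: jison-generated parser (tables + lexer) for the Mermaid class
 * diagram grammar; changes belong in the .jison grammar source rather than
 * in this generated output.
 */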
var parser2 = {
trace: function trace() {
},
yy: {},
symbols_: { "error": 2, "start": 3, "mermaidDoc": 4, "statements": 5, "graphConfig": 6, "CLASS_DIAGRAM": 7, "NEWLINE": 8, "EOF": 9, "statement": 10, "classLabel": 11, "SQS": 12, "STR": 13, "SQE": 14, "namespaceName": 15, "alphaNumToken": 16, "className": 17, "classLiteralName": 18, "GENERICTYPE": 19, "relationStatement": 20, "LABEL": 21, "namespaceStatement": 22, "classStatement": 23, "memberStatement": 24, "annotationStatement": 25, "clickStatement": 26, "cssClassStatement": 27, "noteStatement": 28, "direction": 29, "acc_title": 30, "acc_title_value": 31, "acc_descr": 32, "acc_descr_value": 33, "acc_descr_multiline_value": 34, "namespaceIdentifier": 35, "STRUCT_START": 36, "classStatements": 37, "STRUCT_STOP": 38, "NAMESPACE": 39, "classIdentifier": 40, "STYLE_SEPARATOR": 41, "members": 42, "CLASS": 43, "ANNOTATION_START": 44, "ANNOTATION_END": 45, "MEMBER": 46, "SEPARATOR": 47, "relation": 48, "NOTE_FOR": 49, "noteText": 50, "NOTE": 51, "direction_tb": 52, "direction_bt": 53, "direction_rl": 54, "direction_lr": 55, "relationType": 56, "lineType": 57, "AGGREGATION": 58, "EXTENSION": 59, "COMPOSITION": 60, "DEPENDENCY": 61, "LOLLIPOP": 62, "LINE": 63, "DOTTED_LINE": 64, "CALLBACK": 65, "LINK": 66, "LINK_TARGET": 67, "CLICK": 68, "CALLBACK_NAME": 69, "CALLBACK_ARGS": 70, "HREF": 71, "CSSCLASS": 72, "commentToken": 73, "textToken": 74, "graphCodeTokens": 75, "textNoTagsToken": 76, "TAGSTART": 77, "TAGEND": 78, "==": 79, "--": 80, "PCT": 81, "DEFAULT": 82, "SPACE": 83, "MINUS": 84, "keywords": 85, "UNICODE_TEXT": 86, "NUM": 87, "ALPHA": 88, "BQUOTE_STR": 89, "$accept": 0, "$end": 1 },
terminals_: { 2: "error", 7: "CLASS_DIAGRAM", 8: "NEWLINE", 9: "EOF", 12: "SQS", 13: "STR", 14: "SQE", 19: "GENERICTYPE", 21: "LABEL", 30: "acc_title", 31: "acc_title_value", 32: "acc_descr", 33: "acc_descr_value", 34: "acc_descr_multiline_value", 36: "STRUCT_START", 38: "STRUCT_STOP", 39: "NAMESPACE", 41: "STYLE_SEPARATOR", 43: "CLASS", 44: "ANNOTATION_START", 45: "ANNOTATION_END", 46: "MEMBER", 47: "SEPARATOR", 49: "NOTE_FOR", 51: "NOTE", 52: "direction_tb", 53: "direction_bt", 54: "direction_rl", 55: "direction_lr", 58: "AGGREGATION", 59: "EXTENSION", 60: "COMPOSITION", 61: "DEPENDENCY", 62: "LOLLIPOP", 63: "LINE", 64: "DOTTED_LINE", 65: "CALLBACK", 66: "LINK", 67: "LINK_TARGET", 68: "CLICK", 69: "CALLBACK_NAME", 70: "CALLBACK_ARGS", 71: "HREF", 72: "CSSCLASS", 75: "graphCodeTokens", 77: "TAGSTART", 78: "TAGEND", 79: "==", 80: "--", 81: "PCT", 82: "DEFAULT", 83: "SPACE", 84: "MINUS", 85: "keywords", 86: "UNICODE_TEXT", 87: "NUM", 88: "ALPHA", 89: "BQUOTE_STR" },
productions_: [0, [3, 1], [3, 1], [4, 1], [6, 4], [5, 1], [5, 2], [5, 3], [11, 3], [15, 1], [15, 2], [17, 1], [17, 1], [17, 2], [17, 2], [17, 2], [10, 1], [10, 2], [10, 1], [10, 1], [10, 1], [10, 1], [10, 1], [10, 1], [10, 1], [10, 1], [10, 2], [10, 2], [10, 1], [22, 4], [22, 5], [35, 2], [37, 1], [37, 2], [37, 3], [23, 1], [23, 3], [23, 4], [23, 6], [40, 2], [40, 3], [25, 4], [42, 1], [42, 2], [24, 1], [24, 2], [24, 1], [24, 1], [20, 3], [20, 4], [20, 4], [20, 5], [28, 3], [28, 2], [29, 1], [29, 1], [29, 1], [29, 1], [48, 3], [48, 2], [48, 2], [48, 1], [56, 1], [56, 1], [56, 1], [56, 1], [56, 1], [57, 1], [57, 1], [26, 3], [26, 4], [26, 3], [26, 4], [26, 4], [26, 5], [26, 3], [26, 4], [26, 4], [26, 5], [26, 4], [26, 5], [26, 5], [26, 6], [27, 3], [73, 1], [73, 1], [74, 1], [74, 1], [74, 1], [74, 1], [74, 1], [74, 1], [74, 1], [76, 1], [76, 1], [76, 1], [76, 1], [16, 1], [16, 1], [16, 1], [16, 1], [18, 1], [50, 1]],
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$) {
var $0 = $$.length - 1;
switch (yystate) {
case 8:
this.$ = $$[$0 - 1];
break;
case 9:
case 11:
case 12:
this.$ = $$[$0];
break;
case 10:
case 13:
this.$ = $$[$0 - 1] + $$[$0];
break;
case 14:
case 15:
this.$ = $$[$0 - 1] + "~" + $$[$0] + "~";
break;
case 16:
yy.addRelation($$[$0]);
break;
case 17:
$$[$0 - 1].title = yy.cleanupLabel($$[$0]);
yy.addRelation($$[$0 - 1]);
break;
case 26:
this.$ = $$[$0].trim();
yy.setAccTitle(this.$);
break;
case 27:
case 28:
this.$ = $$[$0].trim();
yy.setAccDescription(this.$);
break;
case 29:
yy.addClassesToNamespace($$[$0 - 3], $$[$0 - 1]);
break;
case 30:
yy.addClassesToNamespace($$[$0 - 4], $$[$0 - 1]);
break;
case 31:
this.$ = $$[$0];
yy.addNamespace($$[$0]);
break;
case 32:
this.$ = [$$[$0]];
break;
case 33:
this.$ = [$$[$0 - 1]];
break;
case 34:
$$[$0].unshift($$[$0 - 2]);
this.$ = $$[$0];
break;
case 36:
yy.setCssClass($$[$0 - 2], $$[$0]);
break;
case 37:
yy.addMembers($$[$0 - 3], $$[$0 - 1]);
break;
case 38:
yy.setCssClass($$[$0 - 5], $$[$0 - 3]);
yy.addMembers($$[$0 - 5], $$[$0 - 1]);
break;
case 39:
this.$ = $$[$0];
yy.addClass($$[$0]);
break;
case 40:
this.$ = $$[$0 - 1];
yy.addClass($$[$0 - 1]);
yy.setClassLabel($$[$0 - 1], $$[$0]);
break;
case 41:
yy.addAnnotation($$[$0], $$[$0 - 2]);
break;
case 42:
this.$ = [$$[$0]];
break;
case 43:
$$[$0].push($$[$0 - 1]);
this.$ = $$[$0];
break;
case 44:
break;
case 45:
yy.addMember($$[$0 - 1], yy.cleanupLabel($$[$0]));
break;
case 46:
break;
case 47:
break;
case 48:
this.$ = { "id1": $$[$0 - 2], "id2": $$[$0], relation: $$[$0 - 1], relationTitle1: "none", relationTitle2: "none" };
break;
case 49:
this.$ = { id1: $$[$0 - 3], id2: $$[$0], relation: $$[$0 - 1], relationTitle1: $$[$0 - 2], relationTitle2: "none" };
break;
case 50:
this.$ = { id1: $$[$0 - 3], id2: $$[$0], relation: $$[$0 - 2], relationTitle1: "none", relationTitle2: $$[$0 - 1] };
break;
case 51:
this.$ = { id1: $$[$0 - 4], id2: $$[$0], relation: $$[$0 - 2], relationTitle1: $$[$0 - 3], relationTitle2: $$[$0 - 1] };
break;
case 52:
yy.addNote($$[$0], $$[$0 - 1]);
break;
case 53:
yy.addNote($$[$0]);
break;
case 54:
yy.setDirection("TB");
break;
case 55:
yy.setDirection("BT");
break;
case 56:
yy.setDirection("RL");
break;
case 57:
yy.setDirection("LR");
break;
case 58:
this.$ = { type1: $$[$0 - 2], type2: $$[$0], lineType: $$[$0 - 1] };
break;
case 59:
this.$ = { type1: "none", type2: $$[$0], lineType: $$[$0 - 1] };
break;
case 60:
this.$ = { type1: $$[$0 - 1], type2: "none", lineType: $$[$0] };
break;
case 61:
this.$ = { type1: "none", type2: "none", lineType: $$[$0] };
break;
case 62:
this.$ = yy.relationType.AGGREGATION;
break;
case 63:
this.$ = yy.relationType.EXTENSION;
break;
case 64:
this.$ = yy.relationType.COMPOSITION;
break;
case 65:
this.$ = yy.relationType.DEPENDENCY;
break;
case 66:
this.$ = yy.relationType.LOLLIPOP;
break;
case 67:
this.$ = yy.lineType.LINE;
break;
case 68:
this.$ = yy.lineType.DOTTED_LINE;
break;
case 69:
case 75:
this.$ = $$[$0 - 2];
yy.setClickEvent($$[$0 - 1], $$[$0]);
break;
case 70:
case 76:
this.$ = $$[$0 - 3];
yy.setClickEvent($$[$0 - 2], $$[$0 - 1]);
yy.setTooltip($$[$0 - 2], $$[$0]);
break;
case 71:
this.$ = $$[$0 - 2];
yy.setLink($$[$0 - 1], $$[$0]);
break;
case 72:
this.$ = $$[$0 - 3];
yy.setLink($$[$0 - 2], $$[$0 - 1], $$[$0]);
break;
case 73:
this.$ = $$[$0 - 3];
yy.setLink($$[$0 - 2], $$[$0 - 1]);
yy.setTooltip($$[$0 - 2], $$[$0]);
break;
case 74:
this.$ = $$[$0 - 4];
yy.setLink($$[$0 - 3], $$[$0 - 2], $$[$0]);
yy.setTooltip($$[$0 - 3], $$[$0 - 1]);
break;
case 77:
this.$ = $$[$0 - 3];
yy.setClickEvent($$[$0 - 2], $$[$0 - 1], $$[$0]);
break;
case 78:
this.$ = $$[$0 - 4];
yy.setClickEvent($$[$0 - 3], $$[$0 - 2], $$[$0 - 1]);
yy.setTooltip($$[$0 - 3], $$[$0]);
break;
case 79:
this.$ = $$[$0 - 3];
yy.setLink($$[$0 - 2], $$[$0]);
break;
case 80:
this.$ = $$[$0 - 4];
yy.setLink($$[$0 - 3], $$[$0 - 1], $$[$0]);
break;
case 81:
this.$ = $$[$0 - 4];
yy.setLink($$[$0 - 3], $$[$0 - 1]);
yy.setTooltip($$[$0 - 3], $$[$0]);
break;
case 82:
this.$ = $$[$0 - 5];
yy.setLink($$[$0 - 4], $$[$0 - 2], $$[$0]);
yy.setTooltip($$[$0 - 4], $$[$0 - 1]);
break;
case 83:
yy.setCssClass($$[$0 - 1], $$[$0]);
break;
}
},
table: [{ 3: 1, 4: 2, 5: 3, 6: 4, 7: [1, 6], 10: 5, 16: 35, 17: 19, 18: 36, 20: 7, 22: 8, 23: 9, 24: 10, 25: 11, 26: 12, 27: 13, 28: 14, 29: 15, 30: $V0, 32: $V1, 34: $V2, 35: 20, 39: $V3, 40: 21, 43: $V4, 44: $V5, 46: $V6, 47: $V7, 49: $V8, 51: $V9, 52: $Va, 53: $Vb, 54: $Vc, 55: $Vd, 65: $Ve, 66: $Vf, 68: $Vg, 72: $Vh, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 1: [3] }, { 1: [2, 1] }, { 1: [2, 2] }, { 1: [2, 3] }, o($Vn, [2, 5], { 8: [1, 44] }), { 8: [1, 45] }, o($Vo, [2, 16], { 21: [1, 46] }), o($Vo, [2, 18]), o($Vo, [2, 19]), o($Vo, [2, 20]), o($Vo, [2, 21]), o($Vo, [2, 22]), o($Vo, [2, 23]), o($Vo, [2, 24]), o($Vo, [2, 25]), { 31: [1, 47] }, { 33: [1, 48] }, o($Vo, [2, 28]), o($Vo, [2, 44], { 48: 49, 56: 52, 57: 53, 13: [1, 50], 21: [1, 51], 58: $Vp, 59: $Vq, 60: $Vr, 61: $Vs, 62: $Vt, 63: $Vu, 64: $Vv }), { 36: [1, 61] }, o($Vw, [2, 35], { 36: [1, 63], 41: [1, 62] }), o($Vo, [2, 46]), o($Vo, [2, 47]), { 16: 64, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl }, { 16: 35, 17: 65, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 16: 35, 17: 66, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 16: 35, 17: 67, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 13: [1, 68] }, { 16: 35, 17: 69, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 13: $Vx, 50: 70 }, o($Vo, [2, 54]), o($Vo, [2, 55]), o($Vo, [2, 56]), o($Vo, [2, 57]), o($Vy, [2, 11], { 16: 35, 18: 36, 17: 72, 19: [1, 73], 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }), o($Vy, [2, 12], { 19: [1, 74] }), { 15: 75, 16: 76, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl }, { 16: 35, 17: 77, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, o($Vz, [2, 97]), o($Vz, [2, 98]), o($Vz, [2, 99]), o($Vz, [2, 100]), o([1, 8, 9, 12, 13, 19, 21, 36, 38, 41, 58, 59, 60, 61, 62, 63, 64, 69, 71], [2, 101]), o($Vn, [2, 6], { 10: 5, 20: 7, 22: 8, 23: 9, 24: 10, 25: 11, 26: 12, 27: 13, 28: 14, 29: 15, 17: 19, 35: 20, 40: 21, 16: 35, 18: 36, 5: 78, 30: $V0, 32: $V1, 34: $V2, 39: $V3, 43: $V4, 44: $V5, 46: $V6, 47: $V7, 49: $V8, 51: $V9, 52: $Va, 53: $Vb, 54: $Vc, 55: $Vd, 65: $Ve, 66: $Vf, 68: $Vg, 72: $Vh, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }), { 5: 79, 10: 5, 16: 35, 17: 19, 18: 36, 20: 7, 22: 8, 23: 9, 24: 10, 25: 11, 26: 12, 27: 13, 28: 14, 29: 15, 30: $V0, 32: $V1, 34: $V2, 35: 20, 39: $V3, 40: 21, 43: $V4, 44: $V5, 46: $V6, 47: $V7, 49: $V8, 51: $V9, 52: $Va, 53: $Vb, 54: $Vc, 55: $Vd, 65: $Ve, 66: $Vf, 68: $Vg, 72: $Vh, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, o($Vo, [2, 17]), o($Vo, [2, 26]), o($Vo, [2, 27]), { 13: [1, 81], 16: 35, 17: 80, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, { 48: 82, 56: 52, 57: 53, 58: $Vp, 59: $Vq, 60: $Vr, 61: $Vs, 62: $Vt, 63: $Vu, 64: $Vv }, o($Vo, [2, 45]), { 57: 83, 63: $Vu, 64: $Vv }, o($VA, [2, 61], { 56: 84, 58: $Vp, 59: $Vq, 60: $Vr, 61: $Vs, 62: $Vt }), o($VB, [2, 62]), o($VB, [2, 63]), o($VB, [2, 64]), o($VB, [2, 65]), o($VB, [2, 66]), o($VC, [2, 67]), o($VC, [2, 68]), { 8: [1, 86], 23: 87, 37: 85, 40: 21, 43: $V4 }, { 16: 88, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl }, { 42: 89, 46: $VD }, { 45: [1, 91] }, { 13: [1, 92] }, { 13: [1, 93] }, { 69: [1, 94], 71: [1, 95] }, { 16: 96, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl }, { 13: $Vx, 50: 97 }, o($Vo, [2, 53]), o($Vo, [2, 102]), o($Vy, [2, 13]), o($Vy, [2, 14]), o($Vy, [2, 15]), { 36: [2, 31] }, { 15: 98, 16: 76, 36: [2, 9], 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl }, o($VE, [2, 39], { 11: 99, 12: [1, 100] }), o($Vn, [2, 7]), { 9: [1, 101] }, o($VF, [2, 48]), { 16: 35, 17: 102, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, 
{ 13: [1, 104], 16: 35, 17: 103, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, o($VA, [2, 60], { 56: 105, 58: $Vp, 59: $Vq, 60: $Vr, 61: $Vs, 62: $Vt }), o($VA, [2, 59]), { 38: [1, 106] }, { 23: 87, 37: 107, 40: 21, 43: $V4 }, { 8: [1, 108], 38: [2, 32] }, o($Vw, [2, 36], { 36: [1, 109] }), { 38: [1, 110] }, { 38: [2, 42], 42: 111, 46: $VD }, { 16: 35, 17: 112, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, o($Vo, [2, 69], { 13: [1, 113] }), o($Vo, [2, 71], { 13: [1, 115], 67: [1, 114] }), o($Vo, [2, 75], { 13: [1, 116], 70: [1, 117] }), { 13: [1, 118] }, o($Vo, [2, 83]), o($Vo, [2, 52]), { 36: [2, 10] }, o($VE, [2, 40]), { 13: [1, 119] }, { 1: [2, 4] }, o($VF, [2, 50]), o($VF, [2, 49]), { 16: 35, 17: 120, 18: 36, 84: $Vi, 86: $Vj, 87: $Vk, 88: $Vl, 89: $Vm }, o($VA, [2, 58]), o($Vo, [2, 29]), { 38: [1, 121] }, { 23: 87, 37: 122, 38: [2, 33], 40: 21, 43: $V4 }, { 42: 123, 46: $VD }, o($Vw, [2, 37]), { 38: [2, 43] }, o($Vo, [2, 41]), o($Vo, [2, 70]), o($Vo, [2, 72]), o($Vo, [2, 73], { 67: [1, 124] }), o($Vo, [2, 76]), o($Vo, [2, 77], { 13: [1, 125] }), o($Vo, [2, 79], { 13: [1, 127], 67: [1, 126] }), { 14: [1, 128] }, o($VF, [2, 51]), o($Vo, [2, 30]), { 38: [2, 34] }, { 38: [1, 129] }, o($Vo, [2, 74]), o($Vo, [2, 78]), o($Vo, [2, 80]), o($Vo, [2, 81], { 67: [1, 130] }), o($VE, [2, 8]), o($Vw, [2, 38]), o($Vo, [2, 82])],
defaultActions: { 2: [2, 1], 3: [2, 2], 4: [2, 3], 75: [2, 31], 98: [2, 10], 101: [2, 4], 111: [2, 43], 122: [2, 34] },
parseError: function parseError(str, hash) {
if (hash.recoverable) {
this.trace(str);
} else {
var error = new Error(str);
error.hash = hash;
throw error;
}
},
parse: function parse(input) {
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = "", yylineno = 0, yyleng = 0, TERROR = 2, EOF = 1;
var args = lstack.slice.call(arguments, 1);
var lexer2 = Object.create(this.lexer);
var sharedState = { yy: {} };
for (var k in this.yy) {
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
sharedState.yy[k] = this.yy[k];
}
}
lexer2.setInput(input, sharedState.yy);
sharedState.yy.lexer = lexer2;
sharedState.yy.parser = this;
if (typeof lexer2.yylloc == "undefined") {
lexer2.yylloc = {};
}
var yyloc = lexer2.yylloc;
lstack.push(yyloc);
var ranges = lexer2.options && lexer2.options.ranges;
if (typeof sharedState.yy.parseError === "function") {
this.parseError = sharedState.yy.parseError;
} else {
this.parseError = Object.getPrototypeOf(this).parseError;
}
function lex() {
var token;
token = tstack.pop() || lexer2.lex() || EOF;
if (typeof token !== "number") {
if (token instanceof Array) {
tstack = token;
token = tstack.pop();
}
token = self.symbols_[token] || token;
}
return token;
}
var symbol, state, action, r, yyval = {}, p, len, newState, expected;
while (true) {
state = stack[stack.length - 1];
if (this.defaultActions[state]) {
action = this.defaultActions[state];
} else {
if (symbol === null || typeof symbol == "undefined") {
symbol = lex();
}
action = table[state] && table[state][symbol];
}
if (typeof action === "undefined" || !action.length || !action[0]) {
var errStr = "";
expected = [];
for (p in table[state]) {
if (this.terminals_[p] && p > TERROR) {
expected.push("'" + this.terminals_[p] + "'");
}
}
if (lexer2.showPosition) {
errStr = "Parse error on line " + (yylineno + 1) + ":\n" + lexer2.showPosition() + "\nExpecting " + expected.join(", ") + ", got '" + (this.terminals_[symbol] || symbol) + "'";
} else {
errStr = "Parse error on line " + (yylineno + 1) + ": Unexpected " + (symbol == EOF ? "end of input" : "'" + (this.terminals_[symbol] || symbol) + "'");
}
this.parseError(errStr, {
text: lexer2.match,
token: this.terminals_[symbol] || symbol,
line: lexer2.yylineno,
loc: yyloc,
expected
});
}
if (action[0] instanceof Array && action.length > 1) {
throw new Error("Parse Error: multiple actions possible at state: " + state + ", token: " + symbol);
}
switch (action[0]) {
case 1:
stack.push(symbol);
vstack.push(lexer2.yytext);
lstack.push(lexer2.yylloc);
stack.push(action[1]);
symbol = null;
{
yyleng = lexer2.yyleng;
yytext = lexer2.yytext;
yylineno = lexer2.yylineno;
yyloc = lexer2.yylloc;
}
break;
case 2:
len = this.productions_[action[1]][1];
yyval.$ = vstack[vstack.length - len];
yyval._$ = {
first_line: lstack[lstack.length - (len || 1)].first_line,
last_line: lstack[lstack.length - 1].last_line,
first_column: lstack[lstack.length - (len || 1)].first_column,
last_column: lstack[lstack.length - 1].last_column
};
if (ranges) {
yyval._$.range = [
lstack[lstack.length - (len || 1)].range[0],
lstack[lstack.length - 1].range[1]
];
}
r = this.performAction.apply(yyval, [
yytext,
yyleng,
yylineno,
sharedState.yy,
action[1],
vstack,
lstack
].concat(args));
if (typeof r !== "undefined") {
return r;
}
if (len) {
stack = stack.slice(0, -1 * len * 2);
vstack = vstack.slice(0, -1 * len);
lstack = lstack.slice(0, -1 * len);
}
stack.push(this.productions_[action[1]][0]);
vstack.push(yyval.$);
lstack.push(yyval._$);
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
stack.push(newState);
break;
case 3:
return true;
}
}
return true;
}
};
var lexer = function() {
var lexer2 = {
EOF: 1,
parseError: function parseError(str, hash) {
if (this.yy.parser) {
this.yy.parser.parseError(str, hash);
} else {
throw new Error(str);
}
},
// resets the lexer, sets new input
setInput: function(input, yy) {
this.yy = yy || this.yy || {};
this._input = input;
this._more = this._backtrack = this.done = false;
this.yylineno = this.yyleng = 0;
this.yytext = this.matched = this.match = "";
this.conditionStack = ["INITIAL"];
this.yylloc = {
first_line: 1,
first_column: 0,
last_line: 1,
last_column: 0
};
if (this.options.ranges) {
this.yylloc.range = [0, 0];
}
this.offset = 0;
return this;
},
// consumes and returns one char from the input
input: function() {
var ch = this._input[0];
this.yytext += ch;
this.yyleng++;
this.offset++;
this.match += ch;
this.matched += ch;
var lines = ch.match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno++;
this.yylloc.last_line++;
} else {
this.yylloc.last_column++;
}
if (this.options.ranges) {
this.yylloc.range[1]++;
}
this._input = this._input.slice(1);
return ch;
},
// unshifts one char (or a string) into the input
unput: function(ch) {
var len = ch.length;
var lines = ch.split(/(?:\r\n?|\n)/g);
this._input = ch + this._input;
this.yytext = this.yytext.substr(0, this.yytext.length - len);
this.offset -= len;
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
this.match = this.match.substr(0, this.match.length - 1);
this.matched = this.matched.substr(0, this.matched.length - 1);
if (lines.length - 1) {
this.yylineno -= lines.length - 1;
}
var r = this.yylloc.range;
this.yylloc = {
first_line: this.yylloc.first_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.first_column,
last_column: lines ? (lines.length === oldLines.length ? this.yylloc.first_column : 0) + oldLines[oldLines.length - lines.length].length - lines[0].length : this.yylloc.first_column - len
};
if (this.options.ranges) {
this.yylloc.range = [r[0], r[0] + this.yyleng - len];
}
this.yyleng = this.yytext.length;
return this;
},
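// Taken together: input() consumes a single character and advances yylineno,
// yylloc, offset and (when enabled) the range info, while unput() pushes text
// back onto _input and rewinds the same bookkeeping. less(n), defined below,
// is built on unput() and keeps only the first n characters of the current
// match.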
// When called from action, caches matched text and appends it on next action
more: function() {
this._more = true;
return this;
},
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
reject: function() {
if (this.options.backtrack_lexer) {
this._backtrack = true;
} else {
return this.parseError("Lexical error on line " + (this.yylineno + 1) + ". You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n" + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
return this;
},
// retain first n characters of the match
less: function(n) {
this.unput(this.match.slice(n));
},
// displays already matched input, i.e. for error messages
pastInput: function() {
var past = this.matched.substr(0, this.matched.length - this.match.length);
return (past.length > 20 ? "..." : "") + past.substr(-20).replace(/\n/g, "");
},
// displays upcoming input, i.e. for error messages
upcomingInput: function() {
var next = this.match;
if (next.length < 20) {
next += this._input.substr(0, 20 - next.length);
}
return (next.substr(0, 20) + (next.length > 20 ? "..." : "")).replace(/\n/g, "");
},
// displays the character position where the lexing error occurred, i.e. for error messages
showPosition: function() {
var pre = this.pastInput();
var c = new Array(pre.length + 1).join("-");
return pre + this.upcomingInput() + "\n" + c + "^";
},
// test the lexed token: return FALSE when not a match, otherwise return token
test_match: function(match, indexed_rule) {
var token, lines, backup;
if (this.options.backtrack_lexer) {
backup = {
yylineno: this.yylineno,
yylloc: {
first_line: this.yylloc.first_line,
last_line: this.yylloc.last_line,
first_column: this.yylloc.first_column,
last_column: this.yylloc.last_column
},
yytext: this.yytext,
match: this.match,
matches: this.matches,
matched: this.matched,
yyleng: this.yyleng,
offset: this.offset,
_more: this._more,
_input: this._input,
yy: this.yy,
conditionStack: this.conditionStack.slice(0),
done: this.done
};
if (this.options.ranges) {
backup.yylloc.range = this.yylloc.range.slice(0);
}
}
lines = match[0].match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno += lines.length;
}
this.yylloc = {
first_line: this.yylloc.last_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.last_column,
last_column: lines ? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length : this.yylloc.last_column + match[0].length
};
this.yytext += match[0];
this.match += match[0];
this.matches = match;
this.yyleng = this.yytext.length;
if (this.options.ranges) {
this.yylloc.range = [this.offset, this.offset += this.yyleng];
}
this._more = false;
this._backtrack = false;
this._input = this._input.slice(match[0].length);
this.matched += match[0];
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
if (this.done && this._input) {
this.done = false;
}
if (token) {
return token;
} else if (this._backtrack) {
for (var k in backup) {
this[k] = backup[k];
}
return false;
}
return false;
},
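// Backtracking sketch (only when options.backtrack_lexer is set): test_match()
// snapshots the lexer state into `backup` before running the rule action, so a
// reject() inside performAction sets _backtrack, the snapshot is restored, and
// test_match() returns false, letting next() try the next candidate rule.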
// return next match in input
next: function() {
if (this.done) {
return this.EOF;
}
if (!this._input) {
this.done = true;
}
var token, match, tempMatch, index;
if (!this._more) {
this.yytext = "";
this.match = "";
}
var rules = this._currentRules();
for (var i = 0; i < rules.length; i++) {
tempMatch = this._input.match(this.rules[rules[i]]);
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
match = tempMatch;
index = i;
if (this.options.backtrack_lexer) {
token = this.test_match(tempMatch, rules[i]);
if (token !== false) {
return token;
} else if (this._backtrack) {
match = false;
continue;
} else {
return false;
}
} else if (!this.options.flex) {
break;
}
}
}
if (match) {
token = this.test_match(match, rules[index]);
if (token !== false) {
return token;
}
return false;
}
if (this._input === "") {
return this.EOF;
} else {
return this.parseError("Lexical error on line " + (this.yylineno + 1) + ". Unrecognized text.\n" + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
},
// return next match that has a token
lex: function lex() {
var r = this.next();
if (r) {
return r;
} else {
return this.lex();
}
},
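// Matching sketch: next() tries every rule active in the current condition
// state; without options.flex it stops at the first rule that matches, with
// options.flex it keeps scanning and prefers the longest match. Rule actions
// that return nothing (for example whitespace or comment rules) yield no
// token, so lex() simply calls next() again until a token or EOF comes back.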
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
begin: function begin(condition) {
this.conditionStack.push(condition);
},
// pop the previously active lexer condition state off the condition stack
popState: function popState() {
var n = this.conditionStack.length - 1;
if (n > 0) {
return this.conditionStack.pop();
} else {
return this.conditionStack[0];
}
},
// produce the lexer rule set which is active for the currently active lexer condition state
_currentRules: function _currentRules() {
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
} else {
return this.conditions["INITIAL"].rules;
}
},
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
topState: function topState(n) {
n = this.conditionStack.length - 1 - Math.abs(n || 0);
if (n >= 0) {
return this.conditionStack[n];
} else {
return "INITIAL";
}
},
// alias for begin(condition)
pushState: function pushState(condition) {
this.begin(condition);
},
// return the number of states currently on the stack
stateStackSize: function stateStackSize() {
return this.conditionStack.length;
},
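// Condition-stack sketch using the quoted-string rules below: in INITIAL a `"`
// matches rule 26, which calls begin("string"); inside the "string" state rule
// 25 returns "STR" for the quoted text and rule 24 pops back to the previous
// state on the closing `"`. pushState()/popState()/topState() are thin
// wrappers around the same conditionStack.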
options: {},
performAction: function anonymous(yy, yy_, $avoiding_name_collisions, YY_START) {
switch ($avoiding_name_collisions) {
case 0:
return 52;
case 1:
return 53;
case 2:
return 54;
case 3:
return 55;
case 4:
break;
case 5:
break;
case 6:
this.begin("acc_title");
return 30;
case 7:
this.popState();
return "acc_title_value";
case 8:
this.begin("acc_descr");
return 32;
case 9:
this.popState();
return "acc_descr_value";
case 10:
this.begin("acc_descr_multiline");
break;
case 11:
this.popState();
break;
case 12:
return "acc_descr_multiline_value";
case 13:
return 8;
case 14:
break;
case 15:
return 7;
case 16:
return 7;
case 17:
return "EDGE_STATE";
case 18:
this.begin("callback_name");
break;
case 19:
this.popState();
break;
case 20:
this.popState();
this.begin("callback_args");
break;
case 21:
return 69;
case 22:
this.popState();
break;
case 23:
return 70;
case 24:
this.popState();
break;
case 25:
return "STR";
case 26:
this.begin("string");
break;
case 27:
this.begin("namespace");
return 39;
case 28:
this.popState();
return 8;
case 29:
break;
case 30:
this.begin("namespace-body");
return 36;
case 31:
this.popState();
return 38;
case 32:
return "EOF_IN_STRUCT";
case 33:
return 8;
case 34:
break;
case 35:
return "EDGE_STATE";
case 36:
this.begin("class");
return 43;
case 37:
this.popState();
return 8;
case 38:
break;
case 39:
this.popState();
this.popState();
return 38;
case 40:
this.begin("class-body");
return 36;
case 41:
this.popState();
return 38;
case 42:
return "EOF_IN_STRUCT";
case 43:
return "EDGE_STATE";
case 44:
return "OPEN_IN_STRUCT";
case 45:
break;
case 46:
return "MEMBER";
case 47:
return 72;
case 48:
return 65;
case 49:
return 66;
case 50:
return 68;
case 51:
return 49;
case 52:
return 51;
case 53:
return 44;
case 54:
return 45;
case 55:
return 71;
case 56:
this.popState();
break;
case 57:
return "GENERICTYPE";
case 58:
this.begin("generic");
break;
case 59:
this.popState();
break;
case 60:
return "BQUOTE_STR";
case 61:
this.begin("bqstring");
break;
case 62:
return 67;
case 63:
return 67;
case 64:
return 67;
case 65:
return 67;
case 66:
return 59;
case 67:
return 59;
case 68:
return 61;
case 69:
return 61;
case 70:
return 60;
case 71:
return 58;
case 72:
return 62;
case 73:
return 63;
case 74:
return 64;
case 75:
return 21;
case 76:
return 41;
case 77:
return 84;
case 78:
return "DOT";
case 79:
return "PLUS";
case 80:
return 81;
case 81:
return "EQUALS";
case 82:
return "EQUALS";
case 83:
return 88;
case 84:
return 12;
case 85:
return 14;
case 86:
return "PUNCTUATION";
case 87:
return 87;
case 88:
return 86;
case 89:
return 83;
case 90:
return 9;
}
},
rules: [/^(?:.*direction\s+TB[^\n]*)/, /^(?:.*direction\s+BT[^\n]*)/, /^(?:.*direction\s+RL[^\n]*)/, /^(?:.*direction\s+LR[^\n]*)/, /^(?:%%(?!\{)*[^\n]*(\r?\n?)+)/, /^(?:%%[^\n]*(\r?\n)*)/, /^(?:accTitle\s*:\s*)/, /^(?:(?!\n||)*[^\n]*)/, /^(?:accDescr\s*:\s*)/, /^(?:(?!\n||)*[^\n]*)/, /^(?:accDescr\s*\{\s*)/, /^(?:[\}])/, /^(?:[^\}]*)/, /^(?:\s*(\r?\n)+)/, /^(?:\s+)/, /^(?:classDiagram-v2\b)/, /^(?:classDiagram\b)/, /^(?:\[\*\])/, /^(?:call[\s]+)/, /^(?:\([\s]*\))/, /^(?:\()/, /^(?:[^(]*)/, /^(?:\))/, /^(?:[^)]*)/, /^(?:["])/, /^(?:[^"]*)/, /^(?:["])/, /^(?:namespace\b)/, /^(?:\s*(\r?\n)+)/, /^(?:\s+)/, /^(?:[{])/, /^(?:[}])/, /^(?:$)/, /^(?:\s*(\r?\n)+)/, /^(?:\s+)/, /^(?:\[\*\])/, /^(?:class\b)/, /^(?:\s*(\r?\n)+)/, /^(?:\s+)/, /^(?:[}])/, /^(?:[{])/, /^(?:[}])/, /^(?:$)/, /^(?:\[\*\])/, /^(?:[{])/, /^(?:[\n])/, /^(?:[^{}\n]*)/, /^(?:cssClass\b)/, /^(?:callback\b)/, /^(?:link\b)/, /^(?:click\b)/, /^(?:note for\b)/, /^(?:note\b)/, /^(?:<<)/, /^(?:>>)/, /^(?:href\b)/, /^(?:[~])/, /^(?:[^~]*)/, /^(?:~)/, /^(?:[`])/, /^(?:[^`]+)/, /^(?:[`])/, /^(?:_self\b)/, /^(?:_blank\b)/, /^(?:_parent\b)/, /^(?:_top\b)/, /^(?:\s*<\|)/, /^(?:\s*\|>)/, /^(?:\s*>)/, /^(?:\s*<)/, /^(?:\s*\*)/, /^(?:\s*o\b)/, /^(?:\s*\(\))/, /^(?:--)/, /^(?:\.\.)/, /^(?::{1}[^:\n;]+)/, /^(?::{3})/, /^(?:-)/, /^(?:\.)/, /^(?:\+)/, /^(?:%)/, /^(?:=)/, /^(?:=)/, /^(?:\w+)/, /^(?:\[)/, /^(?:\])/, /^(?:[!"#$%&'*+,-.`?\\/])/, /^(?:[0-9]+)/, /^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/, /^(?:\s)/, /^(?:$)/],
conditions: { "namespace-body": { "rules": [26, 31, 32, 33, 34, 35, 36, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "namespace": { "rules": [26, 27, 28, 29, 30, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "class-body": { "rules": [26, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "class": { "rules": [26, 37, 38, 39, 40, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "acc_descr_multiline": { "rules": [11, 12, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "acc_descr": { "rules": [9, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "acc_title": { "rules": [7, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "callback_args": { "rules": [22, 23, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "callback_name": { "rules": [19, 20, 21, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "href": { "rules": [26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "struct": { "rules": [26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "generic": { "rules": [26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "bqstring": { "rules": [26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "string": { "rules": [24, 25, 26, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": false }, "INITIAL": { "rules": [0, 1, 2, 3, 4, 5, 6, 8, 10, 13, 14, 15, 16, 17, 18, 26, 27, 36, 47, 48, 49, 50, 51, 52, 53, 54, 55, 58, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90], "inclusive": true } }
};
return lexer2;
}();
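// Usage sketch (the input string is hypothetical): the generated lexer is
// driven through setInput()/lex(), e.g.
//   lexer.setInput("classDiagram\n  class Animal\n", parser.yy);
//   let tok;
//   while ((tok = lexer.lex()) !== lexer.EOF) { /* tok is a terminal id or name */ }
// In practice parse() above calls lex() itself, so this is only for illustration.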
parser2.lexer = lexer;
function Parser() {
this.yy = {};
}
Parser.prototype = parser2;
parser2.Parser = Parser;
return new Parser();
}();
parser.parser = parser;
const parser$1 = parser;
const visibilityValues = ["#", "+", "~", "-", ""];
class ClassMember {
constructor(input, memberType) {
this.memberType = memberType;
this.visibility = "";
this.classifier = "";
const sanitizedInput = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.d)(input, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
this.parseMember(sanitizedInput);
}
getDisplayDetails() {
let displayText = this.visibility + (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.v)(this.id);
if (this.memberType === "method") {
displayText += `(${(0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.v)(this.parameters.trim())})`;
if (this.returnType) {
displayText += " : " + (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.v)(this.returnType);
}
}
displayText = displayText.trim();
const cssStyle = this.parseClassifier();
return {
displayText,
cssStyle
};
}
parseMember(input) {
let potentialClassifier = "";
if (this.memberType === "method") {
const methodRegEx = /([#+~-])?(.+)\((.*)\)([\s$*])?(.*)([$*])?/;
const match = input.match(methodRegEx);
if (match) {
const detectedVisibility = match[1] ? match[1].trim() : "";
if (visibilityValues.includes(detectedVisibility)) {
this.visibility = detectedVisibility;
}
this.id = match[2].trim();
this.parameters = match[3] ? match[3].trim() : "";
potentialClassifier = match[4] ? match[4].trim() : "";
this.returnType = match[5] ? match[5].trim() : "";
if (potentialClassifier === "") {
const lastChar = this.returnType.substring(this.returnType.length - 1);
if (lastChar.match(/[$*]/)) {
potentialClassifier = lastChar;
this.returnType = this.returnType.substring(0, this.returnType.length - 1);
}
}
}
} else {
const length = input.length;
const firstChar = input.substring(0, 1);
const lastChar = input.substring(length - 1);
if (visibilityValues.includes(firstChar)) {
this.visibility = firstChar;
}
if (lastChar.match(/[*?]/)) {
potentialClassifier = lastChar;
}
this.id = input.substring(
this.visibility === "" ? 0 : 1,
potentialClassifier === "" ? length : length - 1
);
}
this.classifier = potentialClassifier;
}
parseClassifier() {
switch (this.classifier) {
case "*":
return "font-style:italic;";
case "$":
return "text-decoration:underline;";
default:
return "";
}
}
}
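// Example of how parseMember() above splits a member string (values shown are
// what the regex branches yield for these inputs, before theme styling):
//   new ClassMember("+getTime() int$", "method")
//     // -> visibility "+", id "getTime", parameters "", returnType "int",
//     //    classifier "$"; getDisplayDetails() then gives
//     //    { displayText: "+getTime() : int", cssStyle: "text-decoration:underline;" }
//   new ClassMember("-id*", "attribute")
//     // -> visibility "-", id "id", classifier "*" (rendered italic)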
const MERMAID_DOM_ID_PREFIX = "classId-";
let relations = [];
let classes = {};
let notes = [];
let classCounter = 0;
let namespaces = {};
let namespaceCounter = 0;
let functions = [];
const sanitizeText = (txt) => _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(txt, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
const splitClassNameAndType = function(_id) {
const id = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(_id, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
let genericType = "";
let className = id;
if (id.indexOf("~") > 0) {
const split = id.split("~");
className = sanitizeText(split[0]);
genericType = sanitizeText(split[1]);
}
return { className, type: genericType };
};
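// Example: splitClassNameAndType("List~int~") returns
// { className: "List", type: "int" }; the text between the first pair of "~"
// markers becomes the generic type, and ids without "~" come back with an
// empty type.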
const setClassLabel = function(_id, label) {
const id = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(_id, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
if (label) {
label = sanitizeText(label);
}
const { className } = splitClassNameAndType(id);
classes[className].label = label;
};
const addClass = function(_id) {
const id = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(_id, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
const { className, type } = splitClassNameAndType(id);
if (Object.hasOwn(classes, className)) {
return;
}
const name = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(className, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
classes[name] = {
id: name,
type,
label: name,
cssClasses: [],
methods: [],
members: [],
annotations: [],
domId: MERMAID_DOM_ID_PREFIX + name + "-" + classCounter
};
classCounter++;
};
const lookUpDomId = function(_id) {
const id = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(_id, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
if (id in classes) {
return classes[id].domId;
}
throw new Error("Class not found: " + id);
};
const clear = function() {
relations = [];
classes = {};
notes = [];
functions = [];
functions.push(setupToolTips);
namespaces = {};
namespaceCounter = 0;
(0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.t)();
};
const getClass = function(id) {
return classes[id];
};
const getClasses = function() {
return classes;
};
const getRelations = function() {
return relations;
};
const getNotes = function() {
return notes;
};
const addRelation = function(relation) {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.l.debug("Adding relation: " + JSON.stringify(relation));
addClass(relation.id1);
addClass(relation.id2);
relation.id1 = splitClassNameAndType(relation.id1).className;
relation.id2 = splitClassNameAndType(relation.id2).className;
relation.relationTitle1 = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(relation.relationTitle1.trim(), (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
relation.relationTitle2 = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(relation.relationTitle2.trim(), (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
relations.push(relation);
};
const addAnnotation = function(className, annotation) {
const validatedClassName = splitClassNameAndType(className).className;
classes[validatedClassName].annotations.push(annotation);
};
const addMember = function(className, member) {
addClass(className);
const validatedClassName = splitClassNameAndType(className).className;
const theClass = classes[validatedClassName];
if (typeof member === "string") {
const memberString = member.trim();
if (memberString.startsWith("<<") && memberString.endsWith(">>")) {
theClass.annotations.push(sanitizeText(memberString.substring(2, memberString.length - 2)));
} else if (memberString.indexOf(")") > 0) {
theClass.methods.push(new ClassMember(memberString, "method"));
} else if (memberString) {
theClass.members.push(new ClassMember(memberString, "attribute"));
}
}
};
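// Classification in addMember() above: a trimmed string wrapped in "<<" ">>"
// becomes an annotation, a string containing ")" past position 0 becomes a
// ClassMember of type "method", and anything else non-empty becomes an
// "attribute". For example:
//   addMember("Animal", "<<interface>>");     // annotation "interface"
//   addMember("Animal", "+isMammal() bool");  // method
//   addMember("Animal", "+int age");          // attribute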
const addMembers = function(className, members) {
if (Array.isArray(members)) {
members.reverse();
members.forEach((member) => addMember(className, member));
}
};
const addNote = function(text, className) {
const note = {
id: `note${notes.length}`,
class: className,
text
};
notes.push(note);
};
const cleanupLabel = function(label) {
if (label.startsWith(":")) {
label = label.substring(1);
}
return sanitizeText(label.trim());
};
const setCssClass = function(ids, className) {
ids.split(",").forEach(function(_id) {
let id = _id;
if (_id[0].match(/\d/)) {
id = MERMAID_DOM_ID_PREFIX + id;
}
if (classes[id] !== void 0) {
classes[id].cssClasses.push(className);
}
});
};
const setTooltip = function(ids, tooltip) {
ids.split(",").forEach(function(id) {
if (tooltip !== void 0) {
classes[id].tooltip = sanitizeText(tooltip);
}
});
};
const getTooltip = function(id, namespace) {
if (namespace) {
return namespaces[namespace].classes[id].tooltip;
}
return classes[id].tooltip;
};
const setLink = function(ids, linkStr, target) {
const config = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)();
ids.split(",").forEach(function(_id) {
let id = _id;
if (_id[0].match(/\d/)) {
id = MERMAID_DOM_ID_PREFIX + id;
}
if (classes[id] !== void 0) {
classes[id].link = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.formatUrl(linkStr, config);
if (config.securityLevel === "sandbox") {
classes[id].linkTarget = "_top";
} else if (typeof target === "string") {
classes[id].linkTarget = sanitizeText(target);
} else {
classes[id].linkTarget = "_blank";
}
}
});
setCssClass(ids, "clickable");
};
const setClickEvent = function(ids, functionName, functionArgs) {
ids.split(",").forEach(function(id) {
setClickFunc(id, functionName, functionArgs);
classes[id].haveCallback = true;
});
setCssClass(ids, "clickable");
};
const setClickFunc = function(_domId, functionName, functionArgs) {
const domId = _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.e.sanitizeText(_domId, (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)());
const config = (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)();
if (config.securityLevel !== "loose") {
return;
}
if (functionName === void 0) {
return;
}
const id = domId;
if (classes[id] !== void 0) {
const elemId = lookUpDomId(id);
let argList = [];
if (typeof functionArgs === "string") {
argList = functionArgs.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/);
for (let i = 0; i < argList.length; i++) {
let item = argList[i].trim();
if (item.charAt(0) === '"' && item.charAt(item.length - 1) === '"') {
item = item.substr(1, item.length - 2);
}
argList[i] = item;
}
}
if (argList.length === 0) {
argList.push(elemId);
}
functions.push(function() {
const elem = document.querySelector(`[id="${elemId}"]`);
if (elem !== null) {
elem.addEventListener(
"click",
function() {
_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.u.runFunc(functionName, ...argList);
},
false
);
}
});
}
};
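// Argument handling sketch for setClickFunc(): functionArgs is split on commas
// that sit outside double quotes (the lookahead requires an even number of '"'
// characters to the right), then surrounding quotes are stripped, e.g.
//   '"Alice, Bob", 42'  ->  ['Alice, Bob', '42']
// When no arguments are given, the element's DOM id is passed to the callback.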
const bindFunctions = function(element) {
functions.forEach(function(fun) {
fun(element);
});
};
const lineType = {
LINE: 0,
DOTTED_LINE: 1
};
const relationType = {
AGGREGATION: 0,
EXTENSION: 1,
COMPOSITION: 2,
DEPENDENCY: 3,
LOLLIPOP: 4
};
const setupToolTips = function(element) {
let tooltipElem = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(".mermaidTooltip");
if ((tooltipElem._groups || tooltipElem)[0][0] === null) {
tooltipElem = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)("body").append("div").attr("class", "mermaidTooltip").style("opacity", 0);
}
const svg = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(element).select("svg");
const nodes = svg.selectAll("g.node");
nodes.on("mouseover", function() {
const el = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(this);
const title = el.attr("title");
if (title === null) {
return;
}
const rect = this.getBoundingClientRect();
tooltipElem.transition().duration(200).style("opacity", ".9");
tooltipElem.text(el.attr("title")).style("left", window.scrollX + rect.left + (rect.right - rect.left) / 2 + "px").style("top", window.scrollY + rect.top - 14 + document.body.scrollTop + "px");
tooltipElem.html(tooltipElem.html().replace(/<br\/>/g, "\n"));
el.classed("hover", true);
}).on("mouseout", function() {
tooltipElem.transition().duration(500).style("opacity", 0);
const el = (0,d3__WEBPACK_IMPORTED_MODULE_0__/* .select */ .Ys)(this);
el.classed("hover", false);
});
};
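// setupToolTips() wires a shared ".mermaidTooltip" div: on mouseover of a
// g.node that carries a `title` attribute the div fades in and is positioned
// over the node, and on mouseout it fades back out. It is registered in
// `functions` so bindFunctions() runs it once the diagram has been rendered.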
functions.push(setupToolTips);
let direction = "TB";
const getDirection = () => direction;
const setDirection = (dir) => {
direction = dir;
};
const addNamespace = function(id) {
if (namespaces[id] !== void 0) {
return;
}
namespaces[id] = {
id,
classes: {},
children: {},
domId: MERMAID_DOM_ID_PREFIX + id + "-" + namespaceCounter
};
namespaceCounter++;
};
const getNamespace = function(name) {
return namespaces[name];
};
const getNamespaces = function() {
return namespaces;
};
const addClassesToNamespace = function(id, classNames) {
if (namespaces[id] !== void 0) {
classNames.map((className) => {
classes[className].parent = id;
namespaces[id].classes[className] = classes[className];
});
}
};
const db = {
setAccTitle: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.s,
getAccTitle: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.g,
getAccDescription: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.a,
setAccDescription: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.b,
getConfig: () => (0,_mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.c)().class,
addClass,
bindFunctions,
clear,
getClass,
getClasses,
getNotes,
addAnnotation,
addNote,
getRelations,
addRelation,
getDirection,
setDirection,
addMember,
addMembers,
cleanupLabel,
lineType,
relationType,
setClickEvent,
setCssClass,
setLink,
getTooltip,
setTooltip,
lookUpDomId,
setDiagramTitle: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.q,
getDiagramTitle: _mermaid_934d9bea_js__WEBPACK_IMPORTED_MODULE_1__.r,
setClassLabel,
addNamespace,
addClassesToNamespace,
getNamespace,
getNamespaces
};
const getStyles = (options) => `g.classGroup text {
fill: ${options.nodeBorder || options.classText};
stroke: none;
font-family: ${options.fontFamily};
font-size: 10px;
.title {
font-weight: bolder;
}
}
.nodeLabel, .edgeLabel {
color: ${options.classText};
}
.edgeLabel .label rect {
fill: ${options.mainBkg};
}
.label text {
fill: ${options.classText};
}
.edgeLabel .label span {
background: ${options.mainBkg};
}
.classTitle {
font-weight: bolder;
}
.node rect,
.node circle,
.node ellipse,
.node polygon,
.node path {
fill: ${options.mainBkg};
stroke: ${options.nodeBorder};
stroke-width: 1px;
}
.divider {
stroke: ${options.nodeBorder};
stroke-width: 1;
}
g.clickable {
cursor: pointer;
}
g.classGroup rect {
fill: ${options.mainBkg};
stroke: ${options.nodeBorder};
}
g.classGroup line {
stroke: ${options.nodeBorder};
stroke-width: 1;
}
.classLabel .box {
stroke: none;
stroke-width: 0;
fill: ${options.mainBkg};
opacity: 0.5;
}
.classLabel .label {
fill: ${options.nodeBorder};
font-size: 10px;
}
.relation {
stroke: ${options.lineColor};
stroke-width: 1;
fill: none;
}
.dashed-line{
stroke-dasharray: 3;
}
.dotted-line{
stroke-dasharray: 1 2;
}
#compositionStart, .composition {
fill: ${options.lineColor} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#compositionEnd, .composition {
fill: ${options.lineColor} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#dependencyStart, .dependency {
fill: ${options.lineColor} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#dependencyEnd, .dependency {
fill: ${options.lineColor} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#extensionStart, .extension {
fill: transparent !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#extensionEnd, .extension {
fill: transparent !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#aggregationStart, .aggregation {
fill: transparent !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#aggregationEnd, .aggregation {
fill: transparent !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#lollipopStart, .lollipop {
fill: ${options.mainBkg} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
#lollipopEnd, .lollipop {
fill: ${options.mainBkg} !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
.edgeTerminals {
font-size: 11px;
}
.classTitleText {
text-anchor: middle;
font-size: 18px;
fill: ${options.textColor};
}
`;
const styles = getStyles;
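// Usage sketch for getStyles (the theme values below are hypothetical): the
// renderer is expected to call it with the resolved theme options and inject
// the returned CSS into the diagram, e.g.
//   const css = styles({
//     nodeBorder: "#9370DB", classText: "#333", fontFamily: "sans-serif",
//     mainBkg: "#ECECFF", lineColor: "#333333", textColor: "#333"
//   });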
/***/ })
};
;