'use strict'

Object.defineProperty(exports, '__esModule', {value: true})

var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
var content = require('../tokenize/content.js')
var factorySpace = require('../tokenize/factory-space.js')
var partialBlankLine = require('../tokenize/partial-blank-line.js')

function _interopDefaultLegacy(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}

var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
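
// The flow initializer drives block-level parsing line by line: each line
// first tries a blank line, then the "initial" flow constructs (essentially,
// only indented code), then, after an optional whitespace line prefix, the
// regular flow constructs, and finally falls back to the content construct.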
var tokenize = initializeFlow

function initializeFlow(effects) {
  var self = this
  var initial = effects.attempt(
    // Try to parse a blank line.
    partialBlankLine,
    atBlankEnding,
    // Try to parse initial flow (essentially, only code).
    effects.attempt(
      this.parser.constructs.flowInitial,
      afterConstruct,
      factorySpace(
        effects,
        effects.attempt(
          this.parser.constructs.flow,
          afterConstruct,
          effects.attempt(content, afterConstruct)
        ),
        types.linePrefix
      )
    )
  )

  return initial
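
  // At the line ending (or EOF) that closes a blank line: consume it as a
  // blank line ending, clear the current construct, and return to `initial`
  // for the next line.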
  function atBlankEnding(code) {
    assert__default['default'](
      code === codes.eof || markdownLineEnding(code),
      'expected eol or eof'
    )

    if (code === codes.eof) {
      effects.consume(code)
      return
    }

    effects.enter(types.lineEndingBlank)
    effects.consume(code)
    effects.exit(types.lineEndingBlank)
    self.currentConstruct = undefined
    return initial
  }
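
  // At the line ending (or EOF) after any other flow construct: consume it as
  // a regular line ending, clear the current construct, and return to
  // `initial` for the next line.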
  function afterConstruct(code) {
    assert__default['default'](
      code === codes.eof || markdownLineEnding(code),
      'expected eol or eof'
    )

    if (code === codes.eof) {
      effects.consume(code)
      return
    }

    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    self.currentConstruct = undefined
    return initial
  }
}
exports.tokenize = tokenize
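
// Note: this export is not called directly; micromark's parser wraps it in a
// tokenizer (internally something along the lines of
// `createTokenizer(parser, flow, from)`, an assumed detail of the wiring) and
// feeds it chunks of flow content.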