diff --git a/benchmark/async.js b/benchmark/async.js new file mode 100644 index 000000000..e511a3b4f --- /dev/null +++ b/benchmark/async.js @@ -0,0 +1,65 @@ +const asciidoctor = require('../build/asciidoctor-node.js')() +const runs = process.env.RUNS || 5 + +const run = async (label, fn) => { + console.time(label) + const resultAsyncPromises = [] + for (let i = 0; i < runs; i++) { + resultAsyncPromises.push(fn()) + } + const resultAsync = await Promise.all(resultAsyncPromises) + console.log(resultAsync.length) + console.timeEnd(label) +} + +const includeBaseDirectory = `${__dirname}/fixtures/includes` +const localIncludeInput = ` +include::${includeBaseDirectory}/1.adoc[] +include::${includeBaseDirectory}/2.adoc[] +include::${includeBaseDirectory}/3.adoc[] +include::${includeBaseDirectory}/4.adoc[] +include::${includeBaseDirectory}/5.adoc[] +include::${includeBaseDirectory}/6.adoc[] +include::${includeBaseDirectory}/7.adoc[] +include::${includeBaseDirectory}/8.adoc[] +include::${includeBaseDirectory}/9.adoc[] +include::${includeBaseDirectory}/10.adoc[] +include::${includeBaseDirectory}/11.adoc[] +include::${includeBaseDirectory}/12.adoc[] +include::${includeBaseDirectory}/13.adoc[] +include::${includeBaseDirectory}/14.adoc[] +include::${includeBaseDirectory}/15.adoc[] +include::${includeBaseDirectory}/16.adoc[] +include::${includeBaseDirectory}/17.adoc[] +include::${includeBaseDirectory}/18.adoc[] +include::${includeBaseDirectory}/19.adoc[] +include::${includeBaseDirectory}/20.adoc[] +` + +const remoteIncludeInput = ` +include::https://raw.githubusercontent.com/asciidoctor/asciidoctor.js/master/README.adoc[] +include::https://raw.githubusercontent.com/asciidoctor/asciidoctor.js/master/README.adoc[] +include::https://raw.githubusercontent.com/asciidoctor/asciidoctor.js/master/README.adoc[] +` + +;(async () => { + console.log('warmup...') + for (let i = 0; i < 100; i++) { + const doc = asciidoctor.load(localIncludeInput, { safe: 'safe' }) + doc.convert({ safe: 'safe' 
}) + await asciidoctor.convertAsync(localIncludeInput, { safe: 'safe' }) + } + await run('(local include) - convert', () => { + const doc = asciidoctor.load(localIncludeInput, { safe: 'safe' }) + return doc.convert({ safe: 'safe' }) + }) + await run('(local include) - convertAsync', async () => await asciidoctor.convertAsync(localIncludeInput, { safe: 'safe' })) + await run('(local include) - convertAsync-Promise.all', () => asciidoctor.convertAsync(localIncludeInput, { safe: 'safe' })) + + await run('(remote include) - convert', () => { + const doc = asciidoctor.load(remoteIncludeInput, { safe: 'safe', attributes: { 'allow-uri-read': true } }) + return doc.convert({ safe: 'safe' }) + }) + await run('(remote include) - convertAsync', async () => await asciidoctor.convertAsync(remoteIncludeInput, { safe: 'safe', attributes: { 'allow-uri-read': true } })) + await run('(remote include) - convertAsync-Promise.all', () => asciidoctor.convertAsync(remoteIncludeInput, { safe: 'safe', attributes: { 'allow-uri-read': true } })) +})() diff --git a/benchmark/fixtures/includes/1.adoc b/benchmark/fixtures/includes/1.adoc new file mode 100644 index 000000000..d00491fd7 --- /dev/null +++ b/benchmark/fixtures/includes/1.adoc @@ -0,0 +1 @@ +1 diff --git a/benchmark/fixtures/includes/10.adoc b/benchmark/fixtures/includes/10.adoc new file mode 100644 index 000000000..f599e28b8 --- /dev/null +++ b/benchmark/fixtures/includes/10.adoc @@ -0,0 +1 @@ +10 diff --git a/benchmark/fixtures/includes/11.adoc b/benchmark/fixtures/includes/11.adoc new file mode 100644 index 000000000..b4de39476 --- /dev/null +++ b/benchmark/fixtures/includes/11.adoc @@ -0,0 +1 @@ +11 diff --git a/benchmark/fixtures/includes/12.adoc b/benchmark/fixtures/includes/12.adoc new file mode 100644 index 000000000..48082f72f --- /dev/null +++ b/benchmark/fixtures/includes/12.adoc @@ -0,0 +1 @@ +12 diff --git a/benchmark/fixtures/includes/13.adoc b/benchmark/fixtures/includes/13.adoc new file mode 100644 index 
000000000..b1bd38b62 --- /dev/null +++ b/benchmark/fixtures/includes/13.adoc @@ -0,0 +1 @@ +13 diff --git a/benchmark/fixtures/includes/14.adoc b/benchmark/fixtures/includes/14.adoc new file mode 100644 index 000000000..8351c1939 --- /dev/null +++ b/benchmark/fixtures/includes/14.adoc @@ -0,0 +1 @@ +14 diff --git a/benchmark/fixtures/includes/15.adoc b/benchmark/fixtures/includes/15.adoc new file mode 100644 index 000000000..60d3b2f4a --- /dev/null +++ b/benchmark/fixtures/includes/15.adoc @@ -0,0 +1 @@ +15 diff --git a/benchmark/fixtures/includes/16.adoc b/benchmark/fixtures/includes/16.adoc new file mode 100644 index 000000000..b6a7d89c6 --- /dev/null +++ b/benchmark/fixtures/includes/16.adoc @@ -0,0 +1 @@ +16 diff --git a/benchmark/fixtures/includes/17.adoc b/benchmark/fixtures/includes/17.adoc new file mode 100644 index 000000000..98d9bcb75 --- /dev/null +++ b/benchmark/fixtures/includes/17.adoc @@ -0,0 +1 @@ +17 diff --git a/benchmark/fixtures/includes/18.adoc b/benchmark/fixtures/includes/18.adoc new file mode 100644 index 000000000..3c032078a --- /dev/null +++ b/benchmark/fixtures/includes/18.adoc @@ -0,0 +1 @@ +18 diff --git a/benchmark/fixtures/includes/19.adoc b/benchmark/fixtures/includes/19.adoc new file mode 100644 index 000000000..d6b24041c --- /dev/null +++ b/benchmark/fixtures/includes/19.adoc @@ -0,0 +1 @@ +19 diff --git a/benchmark/fixtures/includes/2.adoc b/benchmark/fixtures/includes/2.adoc new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/benchmark/fixtures/includes/2.adoc @@ -0,0 +1 @@ +2 diff --git a/benchmark/fixtures/includes/20.adoc b/benchmark/fixtures/includes/20.adoc new file mode 100644 index 000000000..209e3ef4b --- /dev/null +++ b/benchmark/fixtures/includes/20.adoc @@ -0,0 +1 @@ +20 diff --git a/benchmark/fixtures/includes/3.adoc b/benchmark/fixtures/includes/3.adoc new file mode 100644 index 000000000..00750edc0 --- /dev/null +++ b/benchmark/fixtures/includes/3.adoc @@ -0,0 +1 @@ +3 diff --git 
module Asciidoctor
# Utility methods extracted from the "preprocess_include_directive" method
# https://github.com/asciidoctor/asciidoctor/blob/ebb05a60ff7b4d61655d9d4ec33e5e0100f17de8/lib/asciidoctor/reader.rb#L867
class PreprocessorReader < Reader
  # Internal: Preprocess the directive to include lines from another document.
  #
  # Preprocess the directive to include the target document. The scenarios
  # are as follows:
  #
  # If SafeMode is SECURE or greater, the directive is ignored and the include
  # directive line is emitted verbatim.
  #
  # Otherwise, if an include processor is specified pass the target and
  # attributes to that processor and expect an Array of String lines in return.
  #
  # Otherwise, if the max depth is greater than 0, and is not exceeded by the
  # stack size, normalize the target path and read the lines onto the beginning
  # of the Array of source data.
  #
  # If none of the above apply, emit the include directive line verbatim.
  #
  # target - The unsubstituted String name of the target document to include as specified in the
  #          target slot of the include directive.
  # attrlist - An attribute list String, which is the text between the square brackets of the
  #            include directive.
  #
  # Returns a [Boolean] indicating whether the line under the cursor was changed. To skip over the
  # directive, call shift and return true.
  def preprocess_include_directive target, attrlist
    doc = @document
    # Drop the line if the target contains an attribute reference that cannot be resolved.
    if ((expanded_target = target).include? ATTR_REF_HEAD) &&
        (expanded_target = doc.sub_attributes target, :attribute_missing => 'drop-line').empty?
      shift
      if (doc.attributes['attribute-missing'] || Compliance.attribute_missing) == 'skip'
        unshift %(Unresolved directive in #{@path} - include::#{target}[#{attrlist}])
      end
      true
    # Delegate to the first include processor extension that handles this target.
    elsif include_processors? && (ext = @include_processor_extensions.find {|candidate| candidate.instance.handles? expanded_target })
      shift
      # FIXME parse attributes only if requested by extension
      ext.process_method[doc, self, expanded_target, (doc.parse_attributes attrlist, [], :sub_input => true)]
      true
    # if running in SafeMode::SECURE or greater, don't process this directive
    # however, be friendly and at least make it a link to the source document
    elsif doc.safe >= SafeMode::SECURE
      # FIXME we don't want to use a link macro if we are in a verbatim context
      replace_next_line %(link:#{expanded_target}[])
    elsif (abs_maxdepth = @maxdepth[:abs]) > 0
      if @include_stack.size >= abs_maxdepth
        logger.error message_with_context %(maximum include depth of #{@maxdepth[:rel]} exceeded), :source_location => cursor
        return
      end

      parsed_attrs = doc.parse_attributes attrlist, [], :sub_input => true
      inc_path, target_type, relpath = resolve_include_path expanded_target, attrlist, parsed_attrs
      return inc_path unless target_type

      # Parse the optional lines/tag/tags selection attributes from the attrlist.
      inc_linenos = inc_tags = nil
      if attrlist
        if parsed_attrs.key? 'lines'
          inc_linenos = []
          (split_delimited_value parsed_attrs['lines']).each do |linedef|
            if linedef.include? '..'
              from, to = linedef.split '..', 2
              # An open-ended or negative range selects from `from` to end of file (Infinity).
              inc_linenos += (to.empty? || (to = to.to_i) < 0) ? [from.to_i, 1.0/0.0] : ::Range.new(from.to_i, to).to_a
            else
              inc_linenos << linedef.to_i
            end
          end
          inc_linenos = inc_linenos.empty? ? nil : inc_linenos.sort.uniq
        elsif parsed_attrs.key? 'tag'
          unless (tag = parsed_attrs['tag']).empty? || tag == '!'
            # A leading '!' negates the tag selection.
            inc_tags = (tag.start_with? '!') ? { (tag.slice 1, tag.length) => false } : { tag => true }
          end
        elsif parsed_attrs.key? 'tags'
          inc_tags = {}
          (split_delimited_value parsed_attrs['tags']).each do |tagdef|
            if tagdef.start_with? '!'
              inc_tags[tagdef.slice 1, tagdef.length] = false
            else
              inc_tags[tagdef] = true
            end unless tagdef.empty? || tagdef == '!'
          end
          inc_tags = nil if inc_tags.empty?
        end
      end

      if inc_linenos
        # Select only the requested line numbers from the include content.
        inc_lines, inc_offset, inc_lineno = [], nil, 0
        begin
          select_remaining = nil
          read_include_content(inc_path, target_type).each_line do |l|
            inc_lineno += 1
            if select_remaining || (::Float === (select = inc_linenos[0]) && (select_remaining = select.infinite?))
              # NOTE record line where we started selecting
              inc_offset ||= inc_lineno
              inc_lines << l
            else
              if select == inc_lineno
                # NOTE record line where we started selecting
                inc_offset ||= inc_lineno
                inc_lines << l
                inc_linenos.shift
              end
              break if inc_linenos.empty?
            end
          end
        rescue
          logger.error message_with_context %(include #{target_type} not readable: #{inc_path}), :source_location => cursor
          return replace_next_line %(Unresolved directive in #{@path} - include::#{expanded_target}[#{attrlist}])
        end
        shift
        # FIXME not accounting for skipped lines in reader line numbering
        if inc_offset
          parsed_attrs['partial-option'] = true
          push_include inc_lines, inc_path, relpath, inc_offset, parsed_attrs
        end
      elsif inc_tags
        # Select only the regions enclosed by the requested tag directives.
        inc_lines, inc_offset, inc_lineno, tag_stack, tags_used, active_tag = [], nil, 0, [], ::Set.new, nil
        if inc_tags.key? '**'
          if inc_tags.key? '*'
            select = base_select = (inc_tags.delete '**')
            wildcard = inc_tags.delete '*'
          else
            select = base_select = wildcard = (inc_tags.delete '**')
          end
        else
          select = base_select = !(inc_tags.value? true)
          wildcard = inc_tags.delete '*'
        end
        begin
          dbl_co, dbl_sb = '::', '[]'
          encoding = ::Encoding::UTF_8 if COERCE_ENCODING
          read_include_content(inc_path, target_type).each_line do |l|
            inc_lineno += 1
            # must force encoding since we're performing String operations on line
            l.force_encoding encoding if encoding
            if (l.include? dbl_co) && (l.include? dbl_sb) && TagDirectiveRx =~ l
              if $1 # end tag
                if (this_tag = $2) == active_tag
                  tag_stack.pop
                  active_tag, select = tag_stack.empty? ? [nil, base_select] : tag_stack[-1]
                elsif inc_tags.key? this_tag
                  include_cursor = create_include_cursor inc_path, expanded_target, inc_lineno
                  if (idx = tag_stack.rindex {|key, _| key == this_tag})
                    idx == 0 ? tag_stack.shift : (tag_stack.delete_at idx)
                    logger.warn message_with_context %(mismatched end tag (expected '#{active_tag}' but found '#{this_tag}') at line #{inc_lineno} of include #{target_type}: #{inc_path}), :source_location => cursor, :include_location => include_cursor
                  else
                    logger.warn message_with_context %(unexpected end tag '#{this_tag}' at line #{inc_lineno} of include #{target_type}: #{inc_path}), :source_location => cursor, :include_location => include_cursor
                  end
                end
              elsif inc_tags.key?(this_tag = $2)
                tags_used << this_tag
                # QUESTION should we prevent tag from being selected when enclosing tag is excluded?
                tag_stack << [(active_tag = this_tag), (select = inc_tags[this_tag]), inc_lineno]
              elsif !wildcard.nil?
                select = active_tag && !select ? false : wildcard
                tag_stack << [(active_tag = this_tag), select, inc_lineno]
              end
            elsif select
              # NOTE record the line where we started selecting
              inc_offset ||= inc_lineno
              inc_lines << l
            end
          end
        rescue
          logger.error message_with_context %(include #{target_type} not readable: #{inc_path}), :source_location => cursor
          return replace_next_line %(Unresolved directive in #{@path} - include::#{expanded_target}[#{attrlist}])
        end
        unless tag_stack.empty?
          tag_stack.each do |tag_name, _, tag_lineno|
            logger.warn message_with_context %(detected unclosed tag '#{tag_name}' starting at line #{tag_lineno} of include #{target_type}: #{inc_path}), :source_location => cursor, :include_location => (create_include_cursor inc_path, expanded_target, tag_lineno)
          end
        end
        unless (missing_tags = inc_tags.keys.to_a - tags_used.to_a).empty?
          logger.warn message_with_context %(tag#{missing_tags.size > 1 ? 's' : ''} '#{missing_tags.join ', '}' not found in include #{target_type}: #{inc_path}), :source_location => cursor
        end
        shift
        if inc_offset
          parsed_attrs['partial-option'] = true unless base_select && wildcard && inc_tags.empty?
          # FIXME not accounting for skipped lines in reader line numbering
          push_include inc_lines, inc_path, relpath, inc_offset, parsed_attrs
        end
      else
        begin
          # NOTE read content first so that we only advance cursor if IO operation succeeds
          inc_content = read_include_content inc_path, target_type
          shift
          push_include inc_content, inc_path, relpath, 1, parsed_attrs
        rescue
          logger.error message_with_context %(include #{target_type} not readable: #{inc_path}), :source_location => cursor
          return replace_next_line %(Unresolved directive in #{@path} - include::#{expanded_target}[#{attrlist}])
        end
      end
      true
    end
  end

  # If a VFS is defined, Asciidoctor will use it to resolve the include target.
  # Otherwise use the file system or the network to read the file.
  def read_include_content inc_path, target_type
    if (vfs = @document.options['vfs']) && (content = vfs[inc_path])
      content
    else
      # NOTE(review): Kernel#open is used for non-file targets (URIs); presumably
      # relies on open-uri being available in this runtime - confirm.
      target_type == :file ? ::File.open(inc_path, 'rb') {|f| f.read } : open(inc_path, 'rb') {|f| f.read }
    end
  end
end
end

1 +2

`) + }) + + it('should not throw an exception if the target is not readable', async () => { + expect(async () => asciidoctor.convertAsync('include::404.adoc[]', { safe: 'safe' })).to.not.throw() + }) + + it('should resolve the conditional include if the condition is true', async () => { + const input = `:include1: +ifdef::include1[] +include::spec/fixtures/includes/1.adoc[] +endif::[] +include::spec/fixtures/includes/2.adoc[] +` + expect(await asciidoctor.convertAsync(input, { safe: 'safe' })).to.contain(`

1 +2

`) + }) + + it('should not resolve the inline conditional include if the condition is false', async () => { + const input = ` +ifdef::include1[include::spec/fixtures/includes/1.adoc[]] +include::spec/fixtures/includes/2.adoc[] +` + expect(await asciidoctor.convertAsync(input, { safe: 'safe' })).to.contain('

2

') + }) + + it('should not resolve the conditional include if the condition is false', async () => { + const input = ` +ifdef::include1[] +include::spec/fixtures/includes/1.adoc[] +endif::[] +include::spec/fixtures/includes/2.adoc[] +` + expect(await asciidoctor.convertAsync(input, { safe: 'safe' })).to.contain('

2

/* global Opal, Asciidoctor, toHash, fromHash, prepareOptions */
const fs = require('fs')
const path = require('path')
const http = require('http')
const https = require('https')

// Matches ifdef/ifndef/ifeval/endif directives (possibly escaped with a leading backslash).
const ConditionalDirectiveRx = /^(\\)?(ifdef|ifndef|ifeval|endif)::(\S*?(?:([,+])\S*?)?)\[(.+)?]$/
// Matches include directives (possibly escaped with a leading backslash).
const IncludeDirectiveRx = /^(\\)?include::([^[][^[]*)\[(.+)?]$/
const LF = '\n'
// File extensions treated as AsciiDoc sources (their includes are resolved recursively).
const ASCIIDOC_EXTENSIONS = {
  '.adoc': true,
  '.asciidoc': true,
  '.asc': true,
  '.ad': true
}

// Read a local file as UTF-8, adapting the callback API to a Promise.
const readFile = (fileName) => {
  return new Promise(function (resolve, reject) {
    fs.readFile(fileName, 'utf8', (err, data) => {
      err ? reject(err) : resolve(data)
    })
  })
}

// Fetch the body of an HTTP(S) URI; rejects unless the status code is 200.
const getHttp = (uri) => {
  const httpModule = uri.startsWith('https://') ? https : http
  return new Promise(function (resolve, reject) {
    httpModule.get(uri, (res) => {
      const { statusCode } = res
      if (statusCode !== 200) {
        // consume the response data to free up memory
        res.resume()
        reject(new Error(`Unable to get content from ${uri}. Status code: ${statusCode}.`))
        return
      }
      res.setEncoding('utf8')
      let data = ''
      res.on('data', (chunk) => { data += chunk })
      res.on('end', () => {
        resolve(data)
      })
    }).on('error', (e) => {
      reject(e)
    })
  })
}

const readHttp = (uri) => getHttp(uri)
const readHttps = (uri) => getHttp(uri)

// Safely resolve the safe mode from an integer or a string name.
// Returns 20 (secure) when the value is missing or unrecognized.
const resolveSafeMode = (options) => {
  if (!options) {
    return 20 // secure
  }
  const safeMode = options.safe
  // NOTE an explicit null/undefined check is required here: 0 is the valid
  // 'unsafe' level but is falsy, so a plain truthiness test would silently
  // upgrade it to secure.
  if (safeMode === undefined || safeMode === null) {
    return 20 // secure
  }
  if (typeof safeMode === 'number') {
    // be permissive in case API user wants to define new levels
    return safeMode
  }
  if (typeof safeMode === 'string') {
    const safeModes = {
      'unsafe': 0,
      'safe': 1,
      'server': 10,
      'secure': 20
    }
    // locale-independent lowercasing; the keys are plain ASCII
    const result = safeModes[safeMode.toLowerCase()]
    // NOTE 'unsafe' maps to 0 (falsy), so compare against undefined.
    if (result !== undefined) {
      return result
    }
  }
  return 20 // secure
}

// Returns true when any registered include processor extension handles the target.
const hasIncludeProcessorExtensions = (includeProcessors, target) => {
  if (includeProcessors && includeProcessors.length > 0) {
    for (let i = 0; i < includeProcessors.length; i++) {
      if (includeProcessors[i].handles(target)) {
        return true
      }
    }
  }
  return false
}

// Resolve the base directory from the options, defaulting to the current
// working directory (normalized to POSIX separators).
const getBaseDir = (options) => {
  if (options && options.base_dir) {
    return path.resolve(options.base_dir)
  }
  return process.cwd().split(path.sep).join(path.posix.sep)
}

// Classify an include target as http, https or file and resolve its path
// (relative file paths are resolved against baseDir).
const resolveIncludePath = (target, baseDir) => {
  if (target.startsWith('http://')) {
    return { path: target, type: 'http' }
  }
  if (target.startsWith('https://')) {
    return { path: target, type: 'https' }
  }
  let result = target
  if (result.startsWith('file://')) {
    result = result.substring('file://'.length)
  }
  if (!path.isAbsolute(result)) {
    return { path: path.join(baseDir, result), type: 'file' }
  } else {
    return { path: result, type: 'file' }
  }
}

// Fetch the content of a resolved include target (file, http or https).
// Remote targets are fetched only when the 'allow-uri-read' attribute is set;
// otherwise an empty string is returned so the directive renders as a bare link.
// Read errors are logged and replaced by a placeholder string.
const readIncludeTarget = async (includeTarget, options) => {
  const targetPath = includeTarget.path
  try {
    const attributes = fromHash(options.attributes)
    if (includeTarget.type === 'file') {
      return await readFile(targetPath)
    } else if (includeTarget.type === 'http') {
      if (attributes && attributes['allow-uri-read']) {
        return await readHttp(targetPath)
      }
      return '' // bare link
    } else if (includeTarget.type === 'https') {
      if (attributes && attributes['allow-uri-read']) {
        return await readHttps(targetPath)
      }
      return '' // bare link
    } else {
      console.log(`Unsupported target type ${includeTarget.type}, ignoring the include directive`)
      return `unsupported target type ${includeTarget.type}`
    }
  } catch (error) {
    console.log(error.message)
    return `unable to read ${targetPath}`
  }
}

// Walk the input line by line, resolve every unconditional include directive,
// fetch its content into the "virtual file system" (vfs, keyed by resolved
// path) and recurse into included AsciiDoc files.
const recursivelyProcessUnconditionalInclude = async (doc, baseDir, content, vfs) => {
  const lines = content.split(LF)
  const options = doc.getOptions()
  const exts = doc.getParentDocument() ? undefined : doc.getExtensions()
  const includeProcessors = exts ? exts.getIncludeProcessors() : undefined
  const linesLength = lines.length
  let commentBlockTerminator
  for (let i = 0; i < linesLength; i++) {
    // TODO ignore ifndef, ifdef, ifeval until an endif is found
    const line = lines[i]
    if (commentBlockTerminator && line === commentBlockTerminator) {
      // comment block ends
      commentBlockTerminator = undefined
      continue
    } else {
      if (line.startsWith('///')) {
        if (line.length > 3 && line === '/'.repeat(line.length)) {
          // comment block starts
          commentBlockTerminator = line
          continue
        }
      }
    }
    if (line.endsWith(']') && !line.startsWith('[') && line.includes('::')) {
      // NOTE RegExp.exec returns null when there is no match, so the match
      // object itself must be tested; dereferencing match[0] unconditionally
      // crashed on lines that merely contain 'if' or start with 'inc'
      // (e.g. 'notify::x[y]') without being a directive.
      const conditionDirectiveMatch = ConditionalDirectiveRx.exec(line)
      if (line.includes('if') && conditionDirectiveMatch !== null) {
        continue // we can't evaluate conditional include directive at this stage
      }
      const includeDirectiveMatch = IncludeDirectiveRx.exec(line)
      if ((line.startsWith('inc') || line.startsWith('\\inc')) && includeDirectiveMatch !== null) {
        if (includeDirectiveMatch[1] === '\\') {
          continue // we can't evaluate escaped include directive at this stage
        }
        const target = includeDirectiveMatch[2]
        if (hasIncludeProcessorExtensions(includeProcessors, target)) {
          continue // we can't evaluate include processor at this stage
        }
        const resolvedIncludeTarget = resolveIncludePath(target, baseDir)
        const targetPath = resolvedIncludeTarget.path
        if (!vfs[targetPath]) {
          const includeContent = await readIncludeTarget(resolvedIncludeTarget, options)
          vfs[targetPath] = includeContent
          if (ASCIIDOC_EXTENSIONS[path.extname(path.basename(targetPath))]) {
            await recursivelyProcessUnconditionalInclude(doc, path.dirname(targetPath), includeContent, vfs)
          }
        }
      }
    }
  }
}

// Entry point: resolve every unconditional include of `input` into `vfs`.
const processUnconditionalInclude = async (doc, input, vfs) => {
  const options = doc.getOptions()
  await recursivelyProcessUnconditionalInclude(doc, getBaseDir(options), input, vfs)
}

/**
 * /!\ Highly experimental API /!\
 *
 * Convert an AsciiDoc input asynchronously: include targets are prefetched
 * (local files and, when the 'allow-uri-read' attribute is set, remote URIs)
 * into a virtual file system before the document is parsed, so the parse
 * itself can remain synchronous.
 *
 * @param {string|Buffer} input - the AsciiDoc source
 * @param {Object} [options] - Asciidoctor options (safe mode, attributes, ...)
 * @returns {Promise<string>} the converted output ('' when the converter returns nil)
 */
Asciidoctor.prototype.convertAsync = async function (input, options) {
  options = options || {}
  options.parse = false
  if (typeof input === 'object' && input.constructor.name === 'Buffer') {
    input = input.toString('utf8')
  }
  let doc = this.load(input, options)
  // call the preprocessor extensions
  const exts = doc.getParentDocument() ? undefined : doc.getExtensions()
  if (exts && exts.hasPreprocessors()) {
    const preprocessors = exts.getPreprocessors()
    for (let j = 0; j < preprocessors.length; j++) {
      // NOTE(review): 'this.reader' looks suspicious here ('this' is the
      // Asciidoctor instance, not the document); presumably 'doc.reader'
      // was intended - confirm before changing.
      doc.reader = preprocessors[j]['$process_method']()['$[]'](doc, this.reader) || doc.reader
    }
  }
  const safeMode = resolveSafeMode(options)
  if (safeMode < 20) {
    // resolve the include directives, fetch the content and populate the "virtual file system"
    const vfs = {}
    await processUnconditionalInclude(doc, input, vfs)
    const docOptions = doc.getOptions()
    docOptions.vfs = toHash(vfs)
    doc.options = prepareOptions(docOptions)
  }

  Opal.Asciidoctor.Parser['$parse'](doc.reader, doc, toHash({ header_only: false }))
  doc['$restore_attributes']()

  if (exts && exts.hasTreeProcessors()) {
    const treeProcessors = exts.getTreeProcessors()
    let treeProcessorResult
    for (let j = 0; j < treeProcessors.length; j++) {
      treeProcessorResult = treeProcessors[j]['$process_method']()['$[]'](doc)
      // a tree processor may return a replacement Document
      if (treeProcessorResult && Opal.Asciidoctor.Document['$==='](treeProcessorResult) && treeProcessorResult['$!='](doc)) {
        doc = treeProcessorResult
      }
    }
  }
  const result = doc.convert(options)
  return result === Opal.nil ? '' : result
}