From 36e8c38f854fe349c3224e5845f47e7ff53befb0 Mon Sep 17 00:00:00 2001
From: Gaurav Nelson
Date: Thu, 22 Jun 2017 16:04:56 +1000
Subject: [PATCH] first working version

---
 README.adoc         |  0
 asciidoc-link-check | 55 +++++++++++++++++++++++++++++++++++++++++++++
 index.js            | 18 +++++++++++++++
 package.json        | 41 +++++++++++++++++++++++++++++
 4 files changed, 114 insertions(+)
 create mode 100644 README.adoc
 create mode 100755 asciidoc-link-check
 create mode 100644 index.js
 create mode 100644 package.json

diff --git a/README.adoc b/README.adoc
new file mode 100644
index 0000000..e69de29
diff --git a/asciidoc-link-check b/asciidoc-link-check
new file mode 100755
index 0000000..bc7ee0f
--- /dev/null
+++ b/asciidoc-link-check
@@ -0,0 +1,55 @@
+#!/usr/bin/env node
+
+'use strict';
+
+var chalk = require('chalk');
+var fs = require('fs');
+var asciidocLinkCheck = require('./');
+var program = require('commander');
+var request = require('request');
+var url = require('url');
+var path = require('path');
+
+var statusLabels = {
+    alive: chalk.green('✓'),
+    dead: chalk.red('✖')
+};
+
+var error = false;
+var opts = {};
+var stream = process.stdin; // read from stdin unless a filename is given
+program.arguments('[filenameOrUrl]').action(function (filenameOrUrl) {
+    if (/https?:/.test(filenameOrUrl)) {
+        stream = request.get(filenameOrUrl);
+        try { // extract baseUrl from supplied URL
+            var parsed = url.parse(filenameOrUrl);
+            delete parsed.search;
+            delete parsed.hash;
+            if (parsed.pathname.lastIndexOf('/') !== -1) {
+                parsed.pathname = parsed.pathname.substr(0, parsed.pathname.lastIndexOf('/') + 1);
+            }
+            opts.baseUrl = url.format(parsed);
+        } catch (err) { /* ignore error */ }
+    } else {
+        opts.baseUrl = 'file://' + path.dirname(path.resolve(filenameOrUrl));
+        stream = fs.createReadStream(filenameOrUrl);
+    }
+}).parse(process.argv);
+
+var asciidoc = ''; // collect the asciidoc data, then process it
+stream.on('data', function (chunk) {
+    asciidoc += chunk.toString();
+}).on('end', function () {
+    asciidocLinkCheck(asciidoc, opts, function (err, results) {
+        results.forEach(function (result) {
+            if(result.status === 'dead') {
+                error = true;
+            }
+            console.log('[%s] %s', statusLabels[result.status], result.link);
+        });
+        if(error) {
+            console.error(chalk.red('\nERROR: dead links found!'));
+            process.exit(1);
+        }
+    });
+});
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..77185e6
--- /dev/null
+++ b/index.js
@@ -0,0 +1,18 @@
+'use strict';
+
+var _ = require('lodash');
+var async = require('async');
+var linkCheck = require('link-check');
+var asciidocLinkExtractor = require('asciidoc-link-extractor');
+
+module.exports = function asciidocLinkCheck(asciidoc, opts, callback) {
+    if (arguments.length === 2 && typeof opts === 'function') {
+        // optional 'opts' not supplied.
+        callback = opts;
+        opts = {};
+    }
+
+    async.mapLimit(_.uniq(asciidocLinkExtractor(asciidoc)), 2, function (link, callback) {
+        linkCheck(link, opts, callback);
+    }, callback);
+};
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..42dbec5
--- /dev/null
+++ b/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "asciidoc-link-check",
+  "version": "0.0.0",
+  "description": "Checks all hyperlinks in an asciidoc file if they are alive or dead.",
+  "bin": {
+    "asciidoc-link-check": "asciidoc-link-check"
+  },
+  "main": "index.js",
+  "scripts": {
+    "pretest": "jshint index.js asciidoc-link-check",
+    "test": "mocha -R spec"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/tcort/asciidoc-link-check.git"
+  },
+  "keywords": [
+    "asciidoc",
+    "adoc",
+    "link",
+    "hyperlink",
+    "href",
+    "check",
+    "checker"
+  ],
+  "author": "Gaurav Nelson",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/gaurav-nelson/asciidoc-link-check/issues"
+  },
+  "homepage": "https://github.com/gaurav-nelson/asciidoc-link-check#readme",
+  "dependencies": {
+    "async": "^2.1.4",
+    "chalk": "^1.1.3",
+    "commander": "^2.9.0",
+    "link-check": "^4.0.2",
+    "asciidoc-link-extractor": "^1.0.0",
+    "request": "^2.79.0",
+    "lodash": "^4.17.4"
+  }
+}
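
Usage note: the module added in index.js exports a single function that takes
the AsciiDoc source, an optional opts object, and a Node-style callback, and
reports each extracted link with a status of 'alive' or 'dead'. A minimal
sketch of consuming it follows; the wrapper script, the README.adoc input file,
and the baseUrl value are illustrative assumptions, not part of the patch.

    'use strict';

    var fs = require('fs');
    var asciidocLinkCheck = require('asciidoc-link-check');

    // Read the AsciiDoc source from disk (README.adoc is assumed to exist).
    var asciidoc = fs.readFileSync('README.adoc', 'utf8');

    // Resolve relative links against the current working directory.
    asciidocLinkCheck(asciidoc, { baseUrl: 'file://' + process.cwd() }, function (err, results) {
        if (err) {
            console.error(err);
            return;
        }
        // Each result carries the checked link and its status ('alive' or 'dead').
        results.forEach(function (result) {
            console.log('[%s] %s', result.status, result.link);
        });
    });

From the command line, the asciidoc-link-check script added here accepts a
filename or an http(s) URL argument, or reads AsciiDoc from stdin when no
argument is given, for example: ./asciidoc-link-check README.adoc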