diff --git a/lighthouse-core/audits/seo/is-crawlable.js b/lighthouse-core/audits/seo/is-crawlable.js
index 343941326e27..788cfc130db5 100644
--- a/lighthouse-core/audits/seo/is-crawlable.js
+++ b/lighthouse-core/audits/seo/is-crawlable.js
@@ -31,7 +31,7 @@ function isUnavailable(directive) {
 }
 
 /**
- * Returns false if any of provided directives blocks page from being indexed
+ * Returns true if any of provided directives blocks page from being indexed
  * @param {string} directives
  * @returns {boolean}
  */
@@ -42,7 +42,7 @@ function hasBlockingDirective(directives) {
 }
 
 /**
- * Returns false if robots header specifies user agent (e.g. `googlebot: noindex`)
+ * Returns true if robots header specifies user agent (e.g. `googlebot: noindex`)
  * @param {string} directives
  * @returns {boolean}
  */
diff --git a/lighthouse-core/test/audits/seo/is-crawlable-test.js b/lighthouse-core/test/audits/seo/is-crawlable-test.js
index a14cfdd29240..2c669385658e 100644
--- a/lighthouse-core/test/audits/seo/is-crawlable-test.js
+++ b/lighthouse-core/test/audits/seo/is-crawlable-test.js
@@ -150,6 +150,7 @@ describe('SEO: Is page crawlable audit', () => {
     const mainResource = {
       responseHeaders: [
         {name: 'x-robots-tag', value: 'googlebot: unavailable_after: 25 Jun 2007 15:00:00 PST'},
+        {name: 'x-robots-tag', value: 'unavailable_after: 25 Jun 2045 15:00:00 PST'},
       ],
     };
     const artifacts = {