From 414ae5d638d27f6cb9d85d5d74bc8d0e0b99198d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20Luis=20Gonz=C3=A1lez?=
Date: Mon, 14 Oct 2024 13:41:48 +0200
Subject: [PATCH] [Search][Ent Search deprecation] Web Crawler tile points to
 GH repo instead of becoming disabled (#194743)

## Summary

This PR makes the Web Crawler tile point to the external Open Web Crawler GitHub repository when there is no ent-search node running, instead of becoming disabled through `crawlerDisabled`.

Before:

![CleanShot 2024-10-02 at 18 25 57@2x](https://github.com/user-attachments/assets/2cffe7c8-fbb1-4192-956f-69ba8ec5529a)

After:

![CleanShot 2024-10-02 at 18 25 11@2x](https://github.com/user-attachments/assets/fcf7ac0f-2985-4b7a-9100-3968054505c7)

The web crawler empty state also points to the crawler source code repository when there is no ent-search instance running, based on `errorConnectingMessage`. This should fix https://github.com/elastic/search-team/issues/8319?reload=1?reload=1

![CleanShot 2024-10-08 at 11 48 44@2x](https://github.com/user-attachments/assets/1dedc24e-e23a-4188-a676-f910a9b2ce6c)
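For reviewers, here is a condensed sketch of the new empty-state `actions` logic. It is simplified from the diff below; the exact `EuiButton` props, telemetry attributes, and `target` handling are illustrative rather than verbatim.

```tsx
// Condensed sketch, not the literal diff. The referenced values (EuiButton, GithubIcon,
// CRAWLER, KibanaLogic, NEW_CRAWLER_PATH, i18n, errorConnectingMessage) come from the
// imports this PR adds or that already exist in crawler_empty_state.tsx.
const crawlerEmptyStateAction = Boolean(errorConnectingMessage) ? (
  // No ent-search node reachable: link out to the Open Web Crawler repository.
  <EuiButton iconType={GithubIcon} href={CRAWLER.github_repo} target="_blank" fill>
    {i18n.translate('xpack.enterpriseSearch.crawlerEmptyState.openSourceCrawlerButtonLabel', {
      defaultMessage: 'Source code',
    })}
  </EuiButton>
) : (
  // ent-search is available: keep the existing "New web crawler" flow.
  <EuiButton fill onClick={() => KibanaLogic.values.navigateToUrl(NEW_CRAWLER_PATH)}>
    {i18n.translate('xpack.enterpriseSearch.crawlerEmptyState.newWebCrawlerButtonLabel', {
      defaultMessage: 'New web crawler',
    })}
  </EuiButton>
);
```

The ingestion tile follows the same idea: when `crawlerDisabled` is true, the card's `href` switches to `CRAWLER.github_repo` and the card is flagged through the new `isBeta` prop on `IngestionCard` instead of `isDisabled`.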
### Checklist

Delete any items that are not applicable to this PR.

- [ ] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials
- [ ] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [ ] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed
- [ ] Any UI touched in this PR is usable by keyboard only (learn more about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures (run axe in browser: [FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/), [Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be allowlisted in the cloud and added to the [docker list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive layout. (You can test this [in your browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser compatibility](https://www.elastic.co/support/matrix#matrix_browsers)

### Risk Matrix

Delete this section if it is not applicable to this PR.

Before closing this PR, invite QA, stakeholders, and other developers to identify risks that should be tested prior to the change/feature release.

When forming the risk matrix, consider some of the following examples and how they may potentially impact the change:

| Risk | Probability | Severity | Mitigation/Notes |
|---------------------------|-------------|----------|-------------------------|
| Multiple Spaces—unexpected behavior in non-default Kibana Space. | Low | High | Integration tests will verify that all features are still supported in non-default Kibana Space and when user switches between spaces. |
| Multiple nodes—Elasticsearch polling might have race conditions when multiple Kibana nodes are polling for the same tasks. | High | Low | Tasks are idempotent, so executing them multiple times will not result in logical error, but will degrade performance. To test for this case we add plenty of unit tests around this logic and document manual testing procedure. |
| Code should gracefully handle cases when feature X or plugin Y are disabled. | Medium | High | Unit tests will verify that any feature flag or plugin combination still results in our service operational. |
| [See more potential risk examples](https://github.com/elastic/kibana/blob/main/RISK_MATRIX.mdx) |

### For maintainers

- [ ] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)

---------

Co-authored-by: Elastic Machine
---
 .../enterprise_search/common/constants.ts     |  4 ++
 .../connectors/crawler_empty_state.tsx        | 45 +++++++++++++------
 .../shared/ingestion_card/ingestion_card.tsx  | 13 ++++++
 .../product_selector/ingestion_selector.tsx   | 34 ++++++++++----
 .../applications/shared/icons/github_icon.tsx | 32 +++++++++++++
 .../translations/translations/fr-FR.json      |  1 -
 .../translations/translations/ja-JP.json      |  1 -
 .../translations/translations/zh-CN.json      |  1 -
 8 files changed, 106 insertions(+), 25 deletions(-)
 create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/icons/github_icon.tsx

diff --git a/x-pack/plugins/enterprise_search/common/constants.ts b/x-pack/plugins/enterprise_search/common/constants.ts
index 10b472b1efca1..795237ef9b427 100644
--- a/x-pack/plugins/enterprise_search/common/constants.ts
+++ b/x-pack/plugins/enterprise_search/common/constants.ts
@@ -281,5 +281,9 @@ export const PLUGIN_ID = 'enterpriseSearch';
 export const CONNECTOR_NATIVE_TYPE = 'native';
 export const CONNECTOR_CLIENTS_TYPE = 'connector_clients';
 
+export const CRAWLER = {
+  github_repo: 'https://github.com/elastic/crawler',
+};
+
 // TODO remove this once the connector service types are no longer in "example" state
 export const EXAMPLE_CONNECTOR_SERVICE_TYPES = ['opentext_documentum'];
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/connectors/crawler_empty_state.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/connectors/crawler_empty_state.tsx
index 8e5b91b94e39b..5a03d0560dfbf 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/connectors/crawler_empty_state.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/connectors/crawler_empty_state.tsx
@@ -11,7 +11,9 @@ import { useValues } from 'kea';
 import { EuiButton, EuiEmptyPrompt, EuiPanel } from '@elastic/eui';
 import { i18n } from '@kbn/i18n';
 
+import { CRAWLER } from '../../../../../common/constants';
 import { HttpLogic } from '../../../shared/http';
+import { GithubIcon } from '../../../shared/icons/github_icon';
 import { KibanaLogic } from '../../../shared/kibana';
 
 import { NEW_CRAWLER_PATH } from '../../routes';
@@ -40,19 +42,36 @@ export const CrawlerEmptyState: React.FC = () => {

          }
          actions={
-            KibanaLogic.values.navigateToUrl(NEW_CRAWLER_PATH)}
-            >
-            {i18n.translate('xpack.enterpriseSearch.crawlerEmptyState.newWebCrawlerButtonLabel', {
-              defaultMessage: 'New web crawler',
-            })}
-
+            Boolean(errorConnectingMessage) ? (
+
+              {i18n.translate(
+                'xpack.enterpriseSearch.crawlerEmptyState.openSourceCrawlerButtonLabel',
+                {
+                  defaultMessage: 'Source code',
+                }
+              )}
+
+            ) : (
+              KibanaLogic.values.navigateToUrl(NEW_CRAWLER_PATH)}
+              >
+              {i18n.translate('xpack.enterpriseSearch.crawlerEmptyState.newWebCrawlerButtonLabel', {
+                defaultMessage: 'New web crawler',
+              })}
+
+            )
          }
        />
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/shared/ingestion_card/ingestion_card.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/shared/ingestion_card/ingestion_card.tsx
index 0d01eea4e6787..94bbc515f92bd 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/shared/ingestion_card/ingestion_card.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/shared/ingestion_card/ingestion_card.tsx
@@ -18,6 +18,8 @@ import {
   IconType,
 } from '@elastic/eui';
 
+import { i18n } from '@kbn/i18n';
+
 import { EuiLinkTo } from '../../../../shared/react_router_helpers';
 
 interface IngestionCardProps {
@@ -25,6 +27,7 @@
   buttonLabel: string;
   description: string;
   href?: string;
+  isBeta?: boolean;
   isDisabled?: boolean;
   logo: IconType;
   onClick?: () => void;
@@ -37,6 +40,7 @@
   description,
   href,
   isDisabled,
+  isBeta,
   logo,
   onClick,
   title,
@@ -44,6 +48,15 @@
   return (
@@ -76,13 +78,23 @@ export const IngestionSelector: React.FC = () => {
         {productFeatures.hasWebCrawler && (
               {
                 'Discover, extract, and index searchable content from websites and knowledge bases.',
               }
             )}
-            href={generatePath(ENTERPRISE_SEARCH_CONTENT_PLUGIN.URL + NEW_CRAWLER_PATH)}
-            isDisabled={crawlerDisabled}
+            href={
+              crawlerDisabled
+                ? CRAWLER.github_repo
+                : generatePath(ENTERPRISE_SEARCH_CONTENT_PLUGIN.URL + NEW_CRAWLER_PATH)
+            }
+            isBeta={crawlerDisabled}
             logo={crawlerLogo}
             title={i18n.translate('xpack.enterpriseSearch.ingestSelector.method.crawler', {
               defaultMessage: 'Web Crawler',
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/icons/github_icon.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/icons/github_icon.tsx
new file mode 100644
index 0000000000000..0fc9160272838
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/icons/github_icon.tsx
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +import React from 'react'; + +export const GithubIcon = () => { + return ( + + + + + + + + + + + ); +}; diff --git a/x-pack/plugins/translations/translations/fr-FR.json b/x-pack/plugins/translations/translations/fr-FR.json index cd83be255a90e..af71b7b1b9eda 100644 --- a/x-pack/plugins/translations/translations/fr-FR.json +++ b/x-pack/plugins/translations/translations/fr-FR.json @@ -17385,7 +17385,6 @@ "xpack.enterpriseSearch.ingestSelector.method.connectors.description": "Extraire, transformer, indexer et synchroniser des données issues d'une source de données tiers.", "xpack.enterpriseSearch.ingestSelector.method.crawler": "Robot d'indexation", "xpack.enterpriseSearch.ingestSelector.method.crawler.description": "Découvrir, extraire et indexer du contenu interrogeable provenant de sites web et de bases de connaissances.", - "xpack.enterpriseSearch.ingestSelector.method.crawlerButtonLabel": "Indexer l'URL", "xpack.enterpriseSearch.ingestSelector.method.fileUpload": "Charger un fichier", "xpack.enterpriseSearch.ingestSelector.method.fileUpload.description": "Fichiers texte délimités, tels que CSV et TSV, JSON délimité par une nouvelle ligne.", "xpack.enterpriseSearch.ingestSelector.method.fileUploadLabel": "Choisir un fichier", diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json index bed5620413f7b..cdd8afc68af2e 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ b/x-pack/plugins/translations/translations/ja-JP.json @@ -17131,7 +17131,6 @@ "xpack.enterpriseSearch.ingestSelector.method.connectors.description": "サードパーティのデータソースからデータを抽出、変換、インデックス化、同期します。", "xpack.enterpriseSearch.ingestSelector.method.crawler": "Webクローラー", "xpack.enterpriseSearch.ingestSelector.method.crawler.description": "Webサイトやナレッジベースから検索可能なコンテンツを検出、抽出、インデックス化します。", - "xpack.enterpriseSearch.ingestSelector.method.crawlerButtonLabel": "クロールURL", "xpack.enterpriseSearch.ingestSelector.method.fileUpload": "ファイルをアップロード", "xpack.enterpriseSearch.ingestSelector.method.fileUpload.description": "CSVやTSV、改行区切りのJSONなどの区切られたテキストファイル。", "xpack.enterpriseSearch.ingestSelector.method.fileUploadLabel": "ファイルを選択", diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json index 026f4f6a76dd6..b94fb455c8ad5 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -17160,7 +17160,6 @@ "xpack.enterpriseSearch.ingestSelector.method.connectors.description": "提取、转换、索引和同步来自第三方数据源的数据。", "xpack.enterpriseSearch.ingestSelector.method.crawler": "网络爬虫", "xpack.enterpriseSearch.ingestSelector.method.crawler.description": "发现、提取和索引网站和知识库中的可搜索内容。", - "xpack.enterpriseSearch.ingestSelector.method.crawlerButtonLabel": "爬网 URL", "xpack.enterpriseSearch.ingestSelector.method.fileUpload": "上传文件", "xpack.enterpriseSearch.ingestSelector.method.fileUpload.description": "分隔的文本文件,例如 CSV 和 TSV、换行符分隔的 JSON。", "xpack.enterpriseSearch.ingestSelector.method.fileUploadLabel": "选择文件",