diff --git a/.circleci/config.yml b/.circleci/config.yml index 5734c8d646..b987a01dd1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -27,11 +27,11 @@ executors: - image: cimg/aws:2024.03 commands: sparse_checkout: - description: "Checkout a sparse directory from a specific branch." + description: "Checkout sparse directories from a specific branch." parameters: - directory: + directories: type: string - description: "Directory to checkout sparsely" + description: "Comma-separated list of directories to checkout sparsely" branch: type: string description: "Branch to checkout" @@ -45,10 +45,14 @@ commands: command: | git clone --no-checkout --filter=blob:none << pipeline.project.git_url >>.git . - run: - name: Setup Sparse Checkout + name: Sparse Checkout + environment: + DIRECTORIES: "<< parameters.directories >>" command: | git config core.sparseCheckout true - echo "<< parameters.directory >>/*" >> .git/info/sparse-checkout + echo $DIRECTORIES | tr ',' '\n' | while read dir; do + echo "$dir" | tee -a .git/info/sparse-checkout + done - run: name: Checkout Branch command: | @@ -101,7 +105,7 @@ commands: } }" notify_slack: - description: "Notify slack of a deploy to production" + description: "Notify Slack with message" parameters: slack_bot_token: description: "Slack bot token" @@ -109,24 +113,123 @@ commands: slack_channel: description: "Slack channel name to post the message to" type: string + message_text: + description: "Message text to post to Slack" + type: string + default: "" + message_text_file: + description: "message text_file" + type: string + default: "" steps: - - run: - name: Notify Slack of Deployment - command: | - # Check if the CIRCLE_PULL_REQUEST variable is set and extract the PR number from it - if [ ! -z "$CIRCLE_PULL_REQUEST" ]; then - PR_NUMBER=${CIRCLE_PULL_REQUEST##*/} - MESSAGE_TEXT=":rocket: Deployment of PR <$CIRCLE_PULL_REQUEST|$PR_NUMBER> to production was successful!" - else - MESSAGE_TEXT=":rocket: Deployment to production was successful!" - fi + - run: + name: Notify Slack + command: | + set -x + # Ensure the $BASH_ENV file exists + if [ ! -f $BASH_ENV ]; then + touch $BASH_ENV + fi + + source $BASH_ENV + cat $BASH_ENV + + # Evaluate message_text_script if provided + if [ -n "<< parameters.message_text_file >>" ]; then + MESSAGE_TEXT=$(cat "<< parameters.message_text_file >>") + else + MESSAGE_TEXT="<< parameters.message_text >>" + fi + + echo $MESSAGE_TEXT + + # Ensure all parameters are provided + if [ -z "<< parameters.slack_bot_token >>" ] || [ -z "<< parameters.slack_channel >>" ] || [ -z "$MESSAGE_TEXT" ]; then + echo "Missing required parameters. Notification will not be sent." 
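+              # Exit non-zero so a misconfigured token, channel, or empty message fails the CI step instead of silently skipping the notification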
+ exit 1 + fi + + response=$(curl -s -X POST \ + -H "Authorization: Bearer << parameters.slack_bot_token >>" \ + -H 'Content-type: application/json;charset=utf-8' \ + --data "{ + \"channel\": \"<< parameters.slack_channel >>\", + \"text\": \"$MESSAGE_TEXT\" + }" \ + https://slack.com/api/chat.postMessage) + + ok=$(echo $response | jq -r '.ok') + error=$(echo $response | jq -r '.error') + + if [ "$ok" != "true" ]; then + echo "Slack notification failed: $error" + exit 1 + else + echo "Slack notification sent successfully" + fi + + notify_slack_deploy: + parameters: + slack_bot_token: + description: "Slack bot token" + type: string + slack_channel: + description: "Slack channel name to post the message to" + type: string + environment_name: + description: "Name of environment" + type: string + steps: + - run: + name: Generate Message + command: | + # Determine the environment URL + case "<< parameters.environment_name >>" in + sandbox) + ENV_URL="https://tta-smarthub-sandbox.app.cloud.gov/" + ;; + dev) + ENV_URL="https://tta-smarthub-dev.app.cloud.gov/" + ;; + staging) + ENV_URL="https://tta-smarthub-staging.app.cloud.gov/" + ;; + production) + ENV_URL="https://ttahub.ohs.acf.hhs.gov" + ;; + *) + ENV_URL="#" + ;; + esac + + env_name="<< parameters.environment_name >>" + + if [ ! -z "${CIRCLE_PULL_REQUEST}" ]; then + PR_NUMBER=${CIRCLE_PULL_REQUEST##*/} + + PR_TITLE=$(curl -s "${CIRCLE_PULL_REQUEST}" | sed -e :a -e "N; s/\n/ /g; ta" | grep -oP "[^<]+" | sed -re "s~<[^>]+>~~g") + + if [ ! -z "${PR_TITLE}" ]; then + JIRA_URLS=$(curl -s "${CIRCLE_PULL_REQUEST}" | sed -e :a -e "N; s/\n/ /g; ta" | grep -oP "Issue[(]s[)].*Checklists" | grep -oP "\"https[^\"]+\"" | sed -e "s~\"~~g" | grep -o "https://jira.acf.gov/browse/[A-Z0-9-]*") + + MESSAGE_TEXT=":rocket: Deployment of PR <${CIRCLE_PULL_REQUEST}|${PR_NUMBER}> (${PR_TITLE}) to <${ENV_URL}|${env_name}> was successful!" + if [ ! -z "${JIRA_URLS}" ]; then + MESSAGE_TEXT="${MESSAGE_TEXT}\nJIRA URLs in the PR:\n${JIRA_URLS}" + fi + else + MESSAGE_TEXT=":rocket: Deployment of PR <${CIRCLE_PULL_REQUEST}|${PR_NUMBER}> to <${ENV_URL}|${env_name}> was successful!" + fi + else + MESSAGE_TEXT=":rocket: Deployment to <${ENV_URL}|${env_name}> was successful!" 
+ fi + + echo -e "${MESSAGE_TEXT}" > /tmp/message_file + + - notify_slack: + slack_bot_token: << parameters.slack_bot_token >> + slack_channel: << parameters.slack_channel >> + message_text_file: "/tmp/message_file" - curl -X POST -H "Authorization: Bearer << parameters.slack_bot_token >>" \ - -H 'Content-type: application/json;charset=utf-8' \ - --data "{ - \"channel\": \"<< parameters.slack_channel >>\", - \"text\": \"$MESSAGE_TEXT\" - }" https://slack.com/api/chat.postMessage cf_deploy: description: "Login to cloud foundry space with service account credentials @@ -308,12 +411,13 @@ commands: sudo apt-get update # Install uuid-runtime to have access to uuidgen # Install pv wget - sudo apt-get install pv uuid-runtime wget + sudo apt-get install -y pv uuid-runtime wget coreutils jq + # Install Cloud Foundry CLI wget -q -O - https://packages.cloudfoundry.org/debian/cli.cloudfoundry.org.key | sudo apt-key add - echo "deb https://packages.cloudfoundry.org/debian stable main" | sudo tee /etc/apt/sources.list.d/cloudfoundry-cli.list sudo apt-get update - sudo apt-get install cf8-cli + sudo apt-get install -y cf8-cli # Install plugin needed for connect-to-service cf install-plugin -f https://github.com/cloud-gov/cf-service-connect/releases/download/v1.1.3/cf-service-connect_linux_amd64 @@ -339,6 +443,28 @@ commands: -p ${<< parameters.cloudgov_password >>} \ -o << pipeline.parameters.cg_org >> \ -s ${<< parameters.cloudgov_space >>} + - run: + name: Start Log Monitoring + command: | + #!/bin/bash + CONTROL_FILE="/tmp/stop_tail" + rm -f $CONTROL_FILE + + # Start tailing logs + cf logs tta-automation & + + # Get the PID of the cf logs command + TAIL_PID=$! + + # Wait for the control file to be created + while [ ! -f $CONTROL_FILE ]; do + sleep 1 + done + + # Kill the cf logs command + kill -9 $TAIL_PID + echo "cf logs command for tta-automation has been terminated." + background: true - run: name: cf_lambda - script to trigger backup command: | @@ -364,10 +490,32 @@ commands: environment: CF_RDS_SERVICE_NAME: ttahub-prod CF_S3_SERVICE_NAME: ttahub-db-backups + - run: + name: Generate Message + command: | + if [ ! -z "$CIRCLE_PULL_REQUEST" ]; then + PR_NUMBER=${CIRCLE_PULL_REQUEST##*/} + echo ":download::database: Production backup before PR <$CIRCLE_PULL_REQUEST|$PR_NUMBER> successful!" > /tmp/message_file + else + echo ":download::database: Production backup successful!" 
> /tmp/message_file + fi + - notify_slack: + slack_bot_token: $SLACK_BOT_TOKEN + slack_channel: "acf-head-start-eng" + message_text_file: "/tmp/message_file" - run: name: Logout of service account command: | + # Signal the log monitoring to stop + CONTROL_FILE="/tmp/stop_tail" + touch $CONTROL_FILE + + # Wait for the log monitoring process to terminate + sleep 5 + + # Logout from Cloud Foundry cf logout + parameters: cg_org: description: "Cloud Foundry cloud.gov organization name" @@ -406,7 +554,7 @@ parameters: default: "al-ttahub-2939-add-fei-root-cause-to-goal-card" type: string sandbox_git_branch: # change to feature branch to test deployment - default: "gh/cfignore-keep-sql-drop-tests" + default: "mb/TTAHUB-3198/training-report-alerts" type: string prod_new_relic_app_id: default: "877570491" @@ -633,6 +781,11 @@ jobs: - run: name: Wait for server to start command: ./bin/ping-server 3000 + - run: + name: Monitor database + command: | + docker attach $(docker ps | grep postgres | awk '{print $1}') + background: true - run: name: Install playwright dependencies command: | @@ -843,6 +996,11 @@ jobs: env_name: sandbox new_relic_app_id: << pipeline.parameters.sandbox_new_relic_app_id >> new_relic_api_key: $NEW_RELIC_REST_API_KEY + - notify_slack_deploy: + slack_bot_token: $SLACK_BOT_TOKEN + slack_channel: "acf-head-start-github" + environment_name: "sandbox" + - when: # dev condition: and: @@ -897,6 +1055,11 @@ jobs: env_name: dev new_relic_app_id: << pipeline.parameters.dev_new_relic_app_id >> new_relic_api_key: $NEW_RELIC_REST_API_KEY + - notify_slack_deploy: + slack_bot_token: $SLACK_BOT_TOKEN + slack_channel: "acf-head-start-github" + environment_name: "dev" + - when: # staging condition: and: @@ -949,6 +1112,11 @@ jobs: env_name: staging new_relic_app_id: << pipeline.parameters.staging_new_relic_app_id >> new_relic_api_key: $NEW_RELIC_REST_API_KEY + - notify_slack_deploy: + slack_bot_token: $SLACK_BOT_TOKEN + slack_channel: "acf-head-start-github" + environment_name: "staging" + - when: # prod condition: and: @@ -1001,16 +1169,18 @@ jobs: env_name: prod new_relic_app_id: << pipeline.parameters.prod_new_relic_app_id >> new_relic_api_key: $NEW_RELIC_REST_API_KEY - - notify_slack: + - notify_slack_deploy: slack_bot_token: $SLACK_BOT_TOKEN slack_channel: "acf-ohs-ttahub--contractor-customer-team" + environment_name: "production" + resource_class: large backup_upload_production: docker: - image: cimg/base:2024.05 steps: - sparse_checkout: - directory: 'automation' + directories: 'automation' branch: << pipeline.git.branch >> - cf_backup: auth_client_secret: PROD_AUTH_CLIENT_SECRET diff --git a/README.md b/README.md index 4d8a777981..da8cbf0446 100644 --- a/README.md +++ b/README.md @@ -597,6 +597,9 @@ If you see nothing there, you'll need to add an appropriate policy. ```cf add-network-policy tta-smarthub-APP_NAME clamav-api-ttahub-APP_NAME --protocol tcp --port 9443``` ex: ```cf add-network-policy tta-smarthub-dev clamav-api-ttahub-dev --protocol tcp --port 9443``` +You may need to connect across spaces (for example, our clamav-api-ttahub-dev app is shared by all of our ephemeral environments). If so, use the -s flag. 
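+The -s flag names the space of the *destination* app (here ttahub-dev, where the shared clamav app lives); the source app is resolved in the space you currently have targeted.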
+ex: +```cf add-network-policy tta-smarthub-staging -s ttahub-dev clamav-api-ttahub-dev --protocol tcp --port 9443``` diff --git a/automation/ci/scripts/cf_lambda.sh b/automation/ci/scripts/cf_lambda.sh index d0b9cdfcdb..8ce82d261a 100644 --- a/automation/ci/scripts/cf_lambda.sh +++ b/automation/ci/scripts/cf_lambda.sh @@ -6,6 +6,9 @@ set -o pipefail set -o noglob set -o noclobber +# Source the environment file to get the URLs +source /etc/environment + # ----------------------------------------------------------------------------- # Generic helper functions # ----------------------------------------------------------------------------- @@ -423,7 +426,7 @@ function run_task { function monitor_task { local app_name=$1 local task_name=$2 - local timeout=${3:-200} # Default timeout in seconds + local timeout=${3:-300} # Default timeout in seconds validate_parameters "$app_name" validate_parameters "$task_name" local start_time diff --git a/automation/db-backup/scripts/db_backup.sh b/automation/db-backup/scripts/db_backup.sh index dde763d5fa..0c0f4421ec 100644 --- a/automation/db-backup/scripts/db_backup.sh +++ b/automation/db-backup/scripts/db_backup.sh @@ -666,41 +666,186 @@ perform_backup_and_upload() { fi set -e } + +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# Backup Retention +# ----------------------------------------------------------------------------- +backup_retention() { + log "INFO" "Starting backup retention process" + + local backup_filename_prefix=$1 + local s3_bucket=$AWS_DEFAULT_BUCKET + + log "INFO" "Fetching the list of backup objects" + BACKUPS=$(aws s3api list-objects-v2 --bucket $s3_bucket --prefix ${backup_filename_prefix}/ --query 'Contents[].[Key, LastModified]' --output text) || { + log "ERROR" "Failed to fetch list of backup objects" + set -e + return 1 + } + + NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + + date_diff() { + d1=$(date -d "$1" +%s) + d2=$(date -d "$2" +%s) + echo $(( (d1 - d2) / 86400 )) + } + + get_base_name() { + echo "$1" | sed -e 's/\.[a-z0-9]*$//' + } + + declare -A backup_sets + declare -A processed_dates + + while IFS= read -r line; do + KEY=$(echo $line | awk '{print $1}') + LAST_MODIFIED=$(echo $line | awk '{print $2}') + + BASE_NAME=$(get_base_name "$KEY") + if [ -z "${backup_sets[$BASE_NAME]+isset}" ]; then + backup_sets[$BASE_NAME]="$LAST_MODIFIED" + fi + done <<< "$BACKUPS" + + delete_backup_set() { + BASE_NAME=$1 + for EXT in ".zip" ".pwd" ".md5" ".sha256"; do + KEY="${BASE_NAME}${EXT}" + log "INFO" "Deleting $KEY" + aws s3 rm "s3://${s3_bucket}/${KEY}" || { + log "ERROR" "Failed to delete $KEY" + set -e + return 1 + } + done + } + + for BASE_NAME in "${!backup_sets[@]}"; do + LAST_MODIFIED=${backup_sets[$BASE_NAME]} + AGE=$(date_diff $NOW $LAST_MODIFIED) + + if [ $AGE -le 30 ]; then + continue + elif [ $AGE -le 60 ]; then + DATE=$(date -d $LAST_MODIFIED +%Y-%m-%d) + if [ "${processed_dates[$DATE]+isset}" ]; then + delete_backup_set $BASE_NAME || { + log "ERROR" "Failed to delete backup set for $BASE_NAME" + set -e + return 1 + } + else + processed_dates[$DATE]=true + fi + elif [ $AGE -le 90 ]; then + if [ $(date -d $LAST_MODIFIED +%u) -eq 1 ] || [ $(date -d $LAST_MODIFIED +%d) -eq 1 ] || [ $(date -d $LAST_MODIFIED +%d) -eq 15 ]; then + continue + else + delete_backup_set $BASE_NAME || { + log "ERROR" "Failed to delete backup set for $BASE_NAME" + set -e + return 1 + } + fi + elif [ $AGE -le 730 ]; then + if [ $(date -d 
$LAST_MODIFIED +%d) -eq 1 ]; then + continue + else + delete_backup_set $BASE_NAME || { + log "ERROR" "Failed to delete backup set for $BASE_NAME" + set -e + return 1 + } + fi + else + delete_backup_set $BASE_NAME || { + log "ERROR" "Failed to delete backup set for $BASE_NAME" + set -e + return 1 + } + fi + done + + log "INFO" "Backup retention process completed" +} # ----------------------------------------------------------------------------- function main() { local backup_filename_prefix=$1 local rds_server=$2 local aws_s3_server=$3 + local duration=${4-86400} # Default duration to 24 hours log "INFO" "Validate parameters and exports" parameters_validate "${backup_filename_prefix}" parameters_validate "${rds_server}" parameters_validate "${aws_s3_server}" + parameters_validate "${duration}" export_validate "VCAP_SERVICES" log "INFO" "Verify or install awscli" - run_script 'awscli_install.sh' '../../common/scripts/' + run_script 'awscli_install.sh' '../../common/scripts/' || { + log "ERROR" "Failed to install or verify awscli" + set -e + exit 1 + } + log "INFO" "Verify or install postgrescli" - run_script 'postgrescli_install.sh' '../../common/scripts/' + run_script 'postgrescli_install.sh' '../../common/scripts/' || { + log "ERROR" "Failed to install or verify postgrescli" + set -e + exit 1 + } log "INFO" "add the bin dir for the new cli tools to PATH" add_to_path '/tmp/local/bin' - log "INFO" "check dependancies" + log "INFO" "check dependencies" check_dependencies aws md5sum openssl pg_dump pg_isready sha256sum zip log "INFO" "collect and configure credentials" - rds_prep "${VCAP_SERVICES}" "${rds_server}" - aws_s3_prep "${VCAP_SERVICES}" "${aws_s3_server}" + rds_prep "${VCAP_SERVICES}" "${rds_server}" || { + log "ERROR" "Failed to prepare RDS credentials" + set -e + exit 1 + } + + aws_s3_prep "${VCAP_SERVICES}" "${aws_s3_server}" || { + log "ERROR" "Failed to prepare AWS S3 credentials" + set -e + exit 1 + } log "INFO" "verify rds & s3 connectivity" - rds_test_connectivity - s3_test_connectivity + rds_test_connectivity || { + log "ERROR" "RDS connectivity test failed" + set -e + exit 1 + } + + s3_test_connectivity || { + log "ERROR" "S3 connectivity test failed" + set -e + exit 1 + } - log "INFO" "backup, upload, verfity db" - perform_backup_and_upload "${backup_filename_prefix}" + log "INFO" "backup, upload, verify db" + perform_backup_and_upload "${backup_filename_prefix}" || { + log "ERROR" "Backup and upload process failed" + set -e + exit 1 + } + + log "INFO" "run backup retention" + backup_retention "${backup_filename_prefix}" || { + log "ERROR" "Backup retention process failed" + set -e + exit 1 + } log "INFO" "clear the populated env vars" rds_clear diff --git a/bin/README.md b/bin/README.md index 3a0fcfc771..5caa9b38cc 100644 --- a/bin/README.md +++ b/bin/README.md @@ -1,6 +1,6 @@ # S3 Backup Retrieval Tool -This script, named `latest_backup.sh`, is designed to interact with Cloud Foundry's S3 service instances to perform various operations such as creating service keys, retrieving and verifying AWS credentials, and generating presigned URLs for files stored in an S3 bucket. Additionally, the script handles the deletion of service keys post-operation based on user preference. 
+This script, named `latest_backup.sh`, is designed to interact with Cloud Foundry's S3 service instances to perform various operations such as creating service keys, retrieving and verifying AWS credentials, generating presigned URLs for files stored in an S3 bucket, and handling the deletion of service keys post-operation based on user preference. Additionally, it can list all ZIP files in the S3 folder, download and verify specific backup files, and erase files from S3. ## Features @@ -9,6 +9,10 @@ This script, named `latest_backup.sh`, is designed to interact with Cloud Foundr - **AWS Credential Verification**: Verifies that AWS credentials are valid. - **Backup File Retrieval**: Retrieves the path for the latest backup file from an S3 bucket and downloads it. - **Presigned URL Generation**: Generates AWS S3 presigned URLs for the specified files, allowing secure, temporary access without requiring AWS credentials. +- **File Listing**: Lists all ZIP files in the specified S3 folder, along with their corresponding pwd, md5, sha256 files, sizes, and ages. +- **Download and Verification**: Downloads a specific backup file and verifies its integrity using MD5 and SHA-256 checksums. +- **File Erasure**: Deletes a specified set of files (ZIP, pwd, md5, sha256) from S3. +- **Old Service Key Deletion**: Deletes service keys older than 6 hours. - **Clean-up Options**: Optionally deletes the service key used during the operations to maintain security. ## Prerequisites @@ -16,22 +20,59 @@ This script, named `latest_backup.sh`, is designed to interact with Cloud Foundr - Cloud Foundry CLI (cf CLI) version 8.0.0 or higher. - AWS CLI installed and configured on the machine where the script will be run. - JQ for parsing JSON output. It must be installed on the machine running the script. -- You must be logged into the Cloud Foundary production environment using the following command before running the script: +- You must be logged into the Cloud Foundry production environment using the following command before running the script: -```BASH +```bash cf login -a api.fr.cloud.gov --sso ``` ## Usage -To use this script, you may need to provide up to three arguments: +To use this script, you may need to provide various arguments based on the required operation: -1. **CF_S3_SERVICE_NAME**: The name of the Cloud Foundary S3 service instance (optional). -2. **s3_folder**: The specific folder within the S3 bucket where `latest-backup.txt` is located (optional). -3. **DELETION_ALLOWED**: Whether to allow deletion of the service key post-operation. Set this to 'yes' to enable deletion (optional). +1. **CF_S3_SERVICE_NAME**: The name of the Cloud Foundry S3 service instance (optional, default: `ttahub-db-backups`). +2. **s3_folder**: The specific folder within the S3 bucket where `latest-backup.txt` is located (optional, default: `production`). +3. **DELETION_ALLOWED**: Whether to allow deletion of the service key post-operation. Set this to 'yes' to enable deletion (optional, default: `no`). +4. **list_zip_files**: Whether to list all ZIP files in the S3 folder. Set this to 'yes' to enable listing (optional, default: `no`). +5. **specific_file**: The specific backup file to process (optional). +6. **download_and_verify**: Whether to download and verify the specific backup file. Set this to 'yes' to enable download and verification (optional, default: `no`). +7. **erase_file**: The specific file to erase from S3 (optional). +8. **delete_old_keys**: Whether to delete old service keys. 
Set this to 'yes' to enable deletion (optional, default: `no`).

### Basic Command

```bash
-./latest_backup.sh [CF_S3_SERVICE_NAME] [s3_folder] [DELETION_ALLOWED]
+./latest_backup.sh [--service-name <service_name>] [--s3-folder <s3_folder>] [--allow-deletion] [--list-zip-files] [--specific-file <file_name>] [--download-and-verify] [--erase-file <file_name>] [--delete-old-keys]
+```
+
+### Example Commands
+
+- Generate presigned URLs for the latest backup:
+
+```bash
+./latest_backup.sh
+```
+
+- List all ZIP files in the specified S3 folder:
+
+```bash
+./latest_backup.sh --list-zip-files
+```
+
+- Download and verify a specific backup file:
+
+```bash
+./latest_backup.sh --specific-file my-backup.zip --download-and-verify
+```
+
+- Erase a specific file and its associated files from S3:
+
+```bash
+./latest_backup.sh --erase-file my-backup.zip
+```
+
+- Delete service keys older than 6 hours:
+
+```bash
+./latest_backup.sh --delete-old-keys
```
diff --git a/bin/latest_backup.sh b/bin/latest_backup.sh
index 40b51c94fa..f4918a2c4b 100644
--- a/bin/latest_backup.sh
+++ b/bin/latest_backup.sh
@@ -1,6 +1,4 @@
 #!/bin/bash
-# Script to retrieve information from latest-backup.txt in S3, get presigned URLs for the files listed,
-# and then delete the service key created.
 
 # Function to check if the installed version of cf CLI is at least version 8
 check_cf_version() {
@@ -10,7 +8,7 @@ check_cf_version() {
     echo "Current cf version ($current_version) is greater than or equal to $minimum_version." >&2
   else
     echo "Current cf version ($current_version) is less than $minimum_version. Please update your cf CLI." >&2
-    return 1 # Return 1 to indicate error
+    exit 1 # Exit to indicate error
   fi
 }
 
@@ -79,6 +77,27 @@ delete_service_key() {
   fi
 }
 
+# Function to delete older service keys
+delete_old_service_keys() {
+  local cf_s3_service_name=$1
+  local current_service_key=$2
+  local current_time=$(date +%s)
+  local six_hours_in_seconds=21600
+  echo "Deleting older service keys for service instance ${cf_s3_service_name}..."
+
+  cf service-keys "${cf_s3_service_name}" | grep -v "$current_service_key" | awk 'NR>1 {print $1}' | while read -r key_name; do
+    if [[ $key_name =~ ^${cf_s3_service_name}-key- ]]; then
+      local key_creation_time=$(cf service-key "${cf_s3_service_name}" "${key_name}" | grep -oP '(?<=created:\s).+')
+      local key_creation_timestamp=$(date --date="$key_creation_time" +%s)
+      local key_age=$((current_time - key_creation_timestamp))
+      if (( key_age > six_hours_in_seconds )); then
+        echo "Deleting old service key ${key_name}..."
+        cf delete-service-key "${cf_s3_service_name}" "${key_name}" -f
+      fi
+    fi
+  done
+}
+
 # Verify AWS Credentials by checking the identity associated with them
 verify_aws_credentials() {
   local retries=5
@@ -117,18 +136,175 @@ generate_presigned_urls() {
   local bucket_name=$1
   local files=("$@")
   local urls=()
+
   for file in "${files[@]:1}"; do
-    local url=$(aws s3 presign "s3://${file}" --expires-in 3600)
-    urls+=("$url")
+    # Check if the file exists in the S3 bucket
+    if aws s3 ls "s3://${bucket_name}/${file}" >/dev/null 2>&1; then
+      local url=$(aws s3 presign "s3://${bucket_name}/${file}" --expires-in 3600)
+      urls+=("$url")
+    else
+      echo "Error: File s3://${bucket_name}/${file} does not exist."
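+      # Abort instead of returning a partial list; callers expect one URL per requested file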
+ exit 1 + fi done + echo "${urls[@]}" } +# Function to list all ZIP files in the same S3 path as the latest backup +list_all_zip_files() { + local bucket_name=$1 + local s3_folder=$2 + local zip_files=$(aws s3 ls "s3://${bucket_name}/${s3_folder}" --recursive | grep '.zip\|.pwd\|.md5\|.sha256') + if [ -z "${zip_files}" ]; then + echo "No ZIP files found in S3 bucket." + else + echo "ZIP files in S3 bucket:" + printf "%-50s %-5s %-5s %-5s %-15s %-5s\n" "Name" "pwd" "md5" "sha256" "size(zip)" "age(days)" + current_date=$(date +%s) + echo "${zip_files}" | \ + while read line; do \ + echo "${line##*.} ${line}";\ + done |\ + sort -rk5 |\ + tr '\n' ' ' | \ + sed 's~ zip ~\nzip ~g' |\ + while read line; do + zip_file=$(echo ${line} | awk '{split($5, a, "/"); print a[length(a)]}'); + has_pwd=$([[ $line == *" pwd "* ]] && echo "x" || echo ""); + has_md5=$([[ $line == *" md5 "* ]] && echo "x" || echo ""); + has_sha256=$([[ $line == *" sha256 "* ]] && echo "x" || echo ""); + zip_size=$(numfmt --to=iec-i --suffix=B $(echo ${line} | awk '{print $4}')); + + # Determine OS and use appropriate date command + if [[ "$OSTYPE" == "darwin"* ]]; then + zip_age=$(( ( $(date +%s) - $(date -j -f "%Y-%m-%d" "$(echo ${line} | awk '{print $2}')" +%s) ) / 86400 )) + else + zip_age=$(( ( $(date +%s) - $(date -d "$(echo ${line} | awk '{print $2}')" +%s) ) / 86400 )) + fi + + printf "%-50s %-5s %-5s %-5s %-15s %-5s\n" "$zip_file" "$has_pwd" "$has_md5" "$has_sha256" "$zip_size" "$zip_age"; + done |\ + sort -k1 + fi +} + +# Function to verify that a file exists in S3 +verify_file_exists() { + local bucket_name=$1 + local file_name=$2 + if aws s3 ls "s3://${bucket_name}/${file_name}" > /dev/null 2>&1; then + return 0 + else + return 1 + fi +} + +# Function to download and verify files +download_and_verify() { + local zip_url=$1 + local zip_file_name=$2 + local password_url=$3 + local md5_url=$4 + local sha256_url=$5 + + # Check if wget is installed + if command -v wget &>/dev/null; then + echo "Using wget to download the file." + wget -O "$zip_file_name" "$zip_url" + else + # If wget is not installed, use curl + echo "wget is not installed. Using curl to download the file." + curl -o "$zip_file_name" "$zip_url" + fi + + # Download password, SHA-256 checksum, and MD5 checksum directly into variables + local password=$(curl -s "$password_url") + local checksum_sha256=$(curl -s "$sha256_url") + local checksum_md5=$(curl -s "$md5_url") + + # Verify SHA-256 checksum + echo "Verifying SHA-256 checksum..." + echo "$checksum_sha256 $zip_file_name" | sha256sum -c + if [ $? -ne 0 ]; then + echo "SHA-256 checksum verification failed." + exit 1 + else + echo "SHA-256 checksum verified." + fi + + # Verify MD5 checksum + echo "Verifying MD5 checksum..." + echo "$checksum_md5 $zip_file_name" | md5sum -c + if [ $? -ne 0 ]; then + echo "MD5 checksum verification failed." + exit 1 + else + echo "MD5 checksum verified." + fi + + # Unzip the file + echo "Unzipping the file..." + unzip -P "$password" "$zip_file_name" + if [ $? -eq 0 ]; then + echo "File unzipped successfully." + + # Rename the extracted file + extracted_file="-" + new_name="${zip_file_name%.zip}" + mv "$extracted_file" "$new_name" + if [ $? -eq 0 ]; then + echo "File renamed to $new_name." + else + echo "Failed to rename the file." + exit 1 + fi + else + echo "Failed to unzip the file." 
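+    # Checksums already passed at this point, so an unzip failure usually means the password from the .pwd file did not match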
+ exit 1 + fi +} + +# Function to erase a set of files from S3 +erase_files() { + local bucket_name=$1 + local s3_folder=$2 + local zip_file=$3 + + local pwd_file="${zip_file%.zip}.pwd" + local md5_file="${zip_file%.zip}.md5" + local sha256_file="${zip_file%.zip}.sha256" + + local files_to_delete=("$zip_file" "$pwd_file" "$md5_file" "$sha256_file") + + echo "Deleting files from S3:" + for file in "${files_to_delete[@]}"; do + local file_path="${s3_folder}/${file}" + if aws s3 ls "s3://${bucket_name}/${file_path}" > /dev/null 2>&1; then + echo "Deleting ${file_path}..." + if aws s3 rm "s3://${bucket_name}/${file_path}"; then + echo "${file_path} deleted successfully." + else + echo "Failed to delete ${file_path}." + exit 9 + fi + else + echo "${file_path} does not exist, skipping deletion." + fi + done +} + + # Function to retrieve and use S3 service credentials fetch_latest_backup_info_and_cleanup() { - local cf_s3_service_name="${1:-ttahub-db-backups}" # Default to 'db-backups' if not provided - local s3_folder="${2:-production}" # Default to root of the bucket if not provided - local deletion_allowed="${3:-no}" # Default to no deletion if not provided + local cf_s3_service_name="${cf_s3_service_name:-ttahub-db-backups}" # Default to 'db-backups' if not provided + local s3_folder="${s3_folder:-production}" # Default to root of the bucket if not provided + local deletion_allowed="${deletion_allowed:-no}" # Default to no deletion if not provided + local list_zip_files="${list_zip_files:-no}" # Default to no listing of ZIP files if not provided + local specific_file="${specific_file:-}" + local download_and_verify="${download_and_verify:-no}" + local erase_file="${erase_file:-}" + local delete_old_keys="${delete_old_keys:-no}" # Generate a unique service key name using UUID local key_name="${cf_s3_service_name}-key-$(uuidgen)" @@ -151,26 +327,52 @@ fetch_latest_backup_info_and_cleanup() { export AWS_DEFAULT_REGION="$aws_default_region" verify_aws_credentials - local latest_backup_file_path=$(find_latest_backup_file_path "$bucket_name" "$s3_folder") + if [ "${delete_old_keys}" = "yes" ]; then + delete_old_service_keys "$cf_s3_service_name" "$key_name" + elif [ "${erase_file}" != "" ]; then + # Erase the specified file along with its corresponding pwd, md5, and sha256 files + erase_files "$bucket_name" "$s3_folder" "$erase_file" + elif [ "${list_zip_files}" = "yes" ]; then + # List all ZIP files if the option is enabled + list_all_zip_files "$bucket_name" "$s3_folder" + else + if [ -n "$specific_file" ]; then + backup_file_name="${s3_folder}/${specific_file}" + if ! verify_file_exists "$bucket_name" "$backup_file_name"; then + echo "Specified file does not exist in S3 bucket." 
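+          # Exit 8 marks a missing S3 object; other paths use distinct codes (9 for delete failures, 12 for usage/dependency errors)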
+ exit 8 + fi + else + local latest_backup_file_path=$(find_latest_backup_file_path "$bucket_name" "$s3_folder") + + # Download and read the latest-backup.txt file using the full path + aws s3 cp "s3://${bucket_name}/${latest_backup_file_path}" /tmp/latest-backup.txt - # Download and read the latest-backup.txt file using the full path - aws s3 cp "s3://${bucket_name}/${latest_backup_file_path}" /tmp/latest-backup.txt + # Extract the names of the latest backup and password files + local backup_file_name=$(awk 'NR==1' /tmp/latest-backup.txt) + backup_file_name=${backup_file_name#"$bucket_name/"} + fi - # Extract the names of the latest backup and password files - local backup_file_name=$(awk 'NR==1' /tmp/latest-backup.txt) - local md5_file_name=$(awk 'NR==2' /tmp/latest-backup.txt) - local sha256_file_name=$(awk 'NR==3' /tmp/latest-backup.txt) - local password_file_name=$(awk 'NR==4' /tmp/latest-backup.txt) + local md5_file_name="${backup_file_name%.zip}.md5" + local sha256_file_name="${backup_file_name%.zip}.sha256" + local password_file_name="${backup_file_name%.zip}.pwd" - # Generate presigned URLs for these files - local urls=$(generate_presigned_urls "$bucket_name" "$backup_file_name" "$password_file_name" "$md5_file_name" "$sha256_file_name") + # Generate presigned URLs for these files + local urls + IFS=' ' read -r -a urls <<< "$(generate_presigned_urls "$bucket_name" "$backup_file_name" "$password_file_name" "$md5_file_name" "$sha256_file_name")" - # Print presigned URLs - echo "Presigned URLs for the files:" - for url in ${urls[@]}; do - echo "$url" - echo "" - done + if [ "${download_and_verify}" = "yes" ]; then + # Perform download and verify functionality + download_and_verify "${urls[0]}" "$(basename "$backup_file_name")" "${urls[1]}" "${urls[2]}" "${urls[3]}" + else + # Print presigned URLs + echo "Presigned URLs for the files:" + for url in "${urls[@]}"; do + echo "$url" + echo "" + done + fi + fi # Clean up by deleting the service key delete_service_key "$cf_s3_service_name" "$key_name" "$deletion_allowed" @@ -178,17 +380,27 @@ fetch_latest_backup_info_and_cleanup() { check_cf_version -# Main execution block -if [ "$#" -gt 3 ]; then - echo "Usage: $0 [ [ []]]" - exit 1 -fi +while [[ "$#" -gt 0 ]]; do + case $1 in + -n|--service-name) cf_s3_service_name="$2"; shift ;; + -s|--s3-folder) s3_folder="$2"; shift ;; + -a|--allow-deletion) deletion_allowed="yes" ;; + -l|--list-zip-files) list_zip_files="yes" ;; + -f|--specific-file) specific_file="$2"; shift ;; + -d|--download-and-verify) download_and_verify="yes"; deletion_allowed="yes" ;; + -e|--erase-file) erase_file="$2"; shift ;; + -k|--delete-old-keys) delete_old_keys="yes" ;; + -h|--help) echo "Usage: $0 [-n | --service-name ] [-s | --s3-folder ] [-a | --allow-deletion] [-l | --list-zip-files] [-f | --specific-file ] [-d | --download-and-verify] [-e | --erase-file ] [-k | --delete-old-keys]"; exit 0 ;; + *) echo "Unknown parameter passed: $1"; exit 12 ;; + esac + shift +done # Check for required dependencies (cf CLI and AWS CLI) if ! type cf >/dev/null 2>&1 || ! type aws >/dev/null 2>&1; then echo "Error: Make sure both Cloud Foundry CLI and AWS CLI are installed." 
- exit 1 + exit 12 fi # Fetch the latest backup information, generate URLs, and clean up the service key -fetch_latest_backup_info_and_cleanup "$1" "$2" "$3" +fetch_latest_backup_info_and_cleanup diff --git a/config/config.js b/config/config.js index c16548d22c..7770fcc3d4 100644 --- a/config/config.js +++ b/config/config.js @@ -4,6 +4,8 @@ const singleLineLogger = ( queryString, ) => console.log(queryString.replace(/\n/g, '\\n')); // eslint-disable-line no-console +const suppressSuccessMessage = process.env.SUPPRESS_SUCCESS_MESSAGE === 'true'; + const connectionValidation = async (connection) => { try { /* @@ -28,8 +30,11 @@ const connectionValidation = async (connection) => { }; const result = await connection.query(queryConfig); - // eslint-disable-next-line no-console - console.info('Connection validated successfully'); + + if (!suppressSuccessMessage) { + // eslint-disable-next-line no-console + console.info('Connection validated successfully'); + } return !!result; } catch (error) { // eslint-disable-next-line no-console diff --git a/frontend/package.json b/frontend/package.json index c70a424147..67f8feb690 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -30,8 +30,8 @@ "moment": "^2.29.4", "moment-timezone": "^0.5.35", "nth-check": "^2.0.1", - "plotly.js": "^2.25.2", - "plotly.js-basic-dist": "^2.2.1", + "plotly.js": "^2.34.0", + "plotly.js-basic-dist": "^2.34.0", "prop-types": "^15.7.2", "query-string": "^7.0.0", "react": "^17.0.1", diff --git a/frontend/src/components/DisplayTableToggleButton.js b/frontend/src/components/DisplayTableToggleButton.js new file mode 100644 index 0000000000..8b97cb59d8 --- /dev/null +++ b/frontend/src/components/DisplayTableToggleButton.js @@ -0,0 +1,24 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import { uniqueId } from 'lodash'; + +export default function DisplayTableToggle({ displayTable, setDisplayTable, title }) { + return ( + + ); +} + +DisplayTableToggle.propTypes = { + displayTable: PropTypes.bool.isRequired, + setDisplayTable: PropTypes.func.isRequired, + title: PropTypes.string.isRequired, +}; diff --git a/frontend/src/components/WidgetContainer.js b/frontend/src/components/WidgetContainer.js index 3e62ab7b81..f86a55eadd 100644 --- a/frontend/src/components/WidgetContainer.js +++ b/frontend/src/components/WidgetContainer.js @@ -28,6 +28,8 @@ export default function WidgetContainer( enableCheckboxes, exportRows, footNote, + displayTable, + setDisplayTable, }, ) { return ( @@ -37,6 +39,8 @@ export default function WidgetContainer( title={title} subtitle={subtitle} showHeaderBorder={showHeaderBorder} + displayTable={displayTable} + setDisplayTable={setDisplayTable} pagination={showPagingTop ? ( {showPagingBottom || footNote ? ( -
+
{footNote && (

{footNote} @@ -104,6 +108,8 @@ WidgetContainer.propTypes = { enableCheckboxes: PropTypes.bool, exportRows: PropTypes.func, footNote: PropTypes.string, + displayTable: PropTypes.bool, + setDisplayTable: PropTypes.func, }; WidgetContainer.defaultProps = { @@ -116,13 +122,15 @@ WidgetContainer.defaultProps = { currentPage: 0, offset: 0, perPage: 10, - handlePageChange: () => { }, + handlePageChange: null, showHeaderBorder: true, error: null, titleSlot: null, loadingLabel: 'Loading', className: '', enableCheckboxes: false, - exportRows: () => {}, + exportRows: null, footNote: null, + displayTable: false, + setDisplayTable: null, }; diff --git a/frontend/src/components/WidgetContainer.scss b/frontend/src/components/WidgetContainer.scss index 851db65986..be22c53d91 100644 --- a/frontend/src/components/WidgetContainer.scss +++ b/frontend/src/components/WidgetContainer.scss @@ -1,17 +1,7 @@ -@use '../colors.scss' as *; - -.smart-hub-widget-container .smart-hub-widget-container-header-border { - border-bottom: 1px solid $base-lighter; -} - .smart-hub-widget-container .smart-hub-widget-container-header p { max-width: 700px; - } - -.smart-hub-widget-container .smart-hub-widget-container-footer { - border-top: 1px solid $base-lighter; - } +} - .smart-hub-widget-container .smart-hub--menu-button .fa-ellipsis { - font-size: 1.5rem; - } \ No newline at end of file +.smart-hub-widget-container .smart-hub--menu-button .fa-ellipsis { + font-size: 1.5rem; +} \ No newline at end of file diff --git a/frontend/src/components/WidgetContainerTitleGroup.js b/frontend/src/components/WidgetContainerTitleGroup.js index c1e0b15fe7..338b04374d 100644 --- a/frontend/src/components/WidgetContainerTitleGroup.js +++ b/frontend/src/components/WidgetContainerTitleGroup.js @@ -1,6 +1,7 @@ import React from 'react'; import PropTypes from 'prop-types'; import ContextMenu from './ContextMenu'; +import DisplayTableToggle from './DisplayTableToggleButton'; const WidgetContainerTitleGroup = ({ children, @@ -11,6 +12,8 @@ const WidgetContainerTitleGroup = ({ pagination, enableCheckboxes, exportRows, + displayTable, + setDisplayTable, }) => { if (!title) { return null; @@ -32,7 +35,7 @@ const WidgetContainerTitleGroup = ({ ] : []; return ( -

+

{title}

@@ -41,6 +44,13 @@ const WidgetContainerTitleGroup = ({ {children}
+ {setDisplayTable && ( + + )} { (menuItems.length > 0 && ( { it('hides header border', async () => { renderWidgetContainer('Widget container header', null, true, () => {}, null, false); const containerElement = screen.getByRole('heading', { name: /widget container header/i }).parentElement; - expect(containerElement).not.toHaveClass('smart-hub-widget-container-header-border'); + expect(containerElement).not.toHaveClass('ttahub-border-base-lighter'); }); it('call exportRows with the correct values', async () => { diff --git a/frontend/src/pages/Admin/TrainingReports.js b/frontend/src/pages/Admin/TrainingReports.js index 7c5b4764b9..dd0b6aa85b 100644 --- a/frontend/src/pages/Admin/TrainingReports.js +++ b/frontend/src/pages/Admin/TrainingReports.js @@ -1,5 +1,6 @@ import React from 'react'; import CsvImport from './components/CsvImport'; +import Req from '../../components/Req'; function TrainingReports() { const primaryIdColumn = 'Event ID'; @@ -8,12 +9,143 @@ function TrainingReports() { const requiredCsvHeaders = [ 'Event ID', - 'Edit Title', - 'Creator', + 'Event Title', + 'IST/Creator', ]; return ( <> +
+ Instructions for CSV + +
+

+ Column names and values need to match exactly. Required columns are marked with + {' '} + + . +

+
    +
  • + + Event ID + + : + {' '} + Single line text value + +
  • +
  • + + Event Title + + : + {' '} + Single line text value + +
  • +
  • + + IST/Creator + + : + {' '} + Single line text value, user email address + +
  • +
  • + + Event Organizer - Type of Event + + : + {' '} + One of + {' '} + Regional PD Event (with National Centers) + {' '} + or + {' '} + IST TTA/Visit +
  • +
  • + + National Centers + + : + {' '} + A three or four digit national center identifier. + This will find and attach the associated user as a collaborator on the event. +
  • +
  •
+          
+            Event Duration
+          
+          :
+          {' '}
+          One of
+          {' '}
+          1 day or less
+          {' '}
+          or
+          {' '}
+          Multi-Day single event
+          {' '}
+          or
+          {' '}
+          Series
  • +
  • + + Reason(s) for PD + + : + {' '} + A list of reasons, separated by new lines. + Any reasons not matching one of the reasons we expect will be ignored. +
  • +
  • + + Vision/Goal/Outcomes for the PD Event + + : + {' '} + A free text field. Formatting other than spaces or new lines will not be preserved. +
  • +
  •
+          
+            Target Population(s)
+          
+          :
+          {' '}
+          A list of populations, separated by new lines.
+          Any populations not matching one of the target populations we expect will be ignored.
  • +
  • + + Audience + + : + {' '} + One of + {' '} + Regional office/TTA + {' '} + or + {' '} + Recipients +
  • +
  •
+          
+            Designated Region POC for Event/Request
+          
+          :
+          {' '}
+          A list of Hub users' names, separated by the "/" character.
+          Any names not matching an existing user will be ignored.
  • +
+
+ +
{ renderResourcesDashboard(user); expect(await screen.findByText(/resource dashboard/i)).toBeVisible(); + const button = await screen.findByRole('button', { name: /Display Resource use as table/i }); + act(() => { + userEvent.click(button); + }); + // Overview (initial). expect(screen.getByText(/40.85%/i)).toBeInTheDocument(); expect(screen.getAllByText(/^[ \t]*reports with resources[ \t]*$/i)[0]).toBeInTheDocument(); diff --git a/frontend/src/pages/TrainingReportForm/pages/eventSummary.js b/frontend/src/pages/TrainingReportForm/pages/eventSummary.js index e804ed1f9b..e25c02020f 100644 --- a/frontend/src/pages/TrainingReportForm/pages/eventSummary.js +++ b/frontend/src/pages/TrainingReportForm/pages/eventSummary.js @@ -385,6 +385,8 @@ const EventSummary = ({ additionalData, datePickerKey }) => { + +
diff --git a/frontend/src/widgets/FrequencyGraph.js b/frontend/src/widgets/FrequencyGraph.js index a91ee196dd..0781313874 100644 --- a/frontend/src/widgets/FrequencyGraph.js +++ b/frontend/src/widgets/FrequencyGraph.js @@ -6,6 +6,7 @@ import withWidgetData from './withWidgetData'; import Container from '../components/Container'; import AccessibleWidgetData from './AccessibleWidgetData'; import BarGraph from './BarGraph'; +import DisplayTableToggle from '../components/DisplayTableToggleButton'; import './FrequencyGraph.css'; function sortData(data, isTabular = false) { @@ -37,11 +38,6 @@ export function FreqGraph({ data, loading }) { const columnHeadings = HEADINGS[selectedGraph]; const toggleGraphLabel = selectedGraph === TOPIC_STR ? REASON_STR : TOPIC_STR; - // toggle the data table - function toggleAccessibleData() { - updateShowAccessibleData((current) => !current); - } - function toggleSelectedGraph() { updateSelectedGraph((current) => (current === TOPIC_STR ? REASON_STR : TOPIC_STR)); } @@ -67,14 +63,10 @@ export function FreqGraph({ data, loading }) { - + { showAccessibleData diff --git a/frontend/src/widgets/GoalStatusGraph.js b/frontend/src/widgets/GoalStatusGraph.js index d47f795259..46599b6916 100644 --- a/frontend/src/widgets/GoalStatusGraph.js +++ b/frontend/src/widgets/GoalStatusGraph.js @@ -8,6 +8,7 @@ import Container from '../components/Container'; import AccessibleWidgetData from './AccessibleWidgetData'; import colors from '../colors'; import VanillaModal from '../components/VanillaModal'; +import DisplayTableToggle from '../components/DisplayTableToggleButton'; const GOAL_STATUSES = [ 'Not started', @@ -86,11 +87,6 @@ export function GoalStatusChart({ data, loading }) { setBars(newBars); }, [data]); - // toggle the data table - function toggleAccessibleData() { - updateShowAccessibleData((current) => !current); - } - if (!data) { return null; } @@ -104,14 +100,11 @@ export function GoalStatusChart({ data, loading }) { - + diff --git a/frontend/src/widgets/ResourceUse.js b/frontend/src/widgets/ResourceUse.js index cfe24cfd48..f2758eb57c 100644 --- a/frontend/src/widgets/ResourceUse.js +++ b/frontend/src/widgets/ResourceUse.js @@ -1,9 +1,12 @@ -import React from 'react'; +import React, { useState } from 'react'; import PropTypes from 'prop-types'; import HorizontalTableWidget from './HorizontalTableWidget'; import WidgetContainer from '../components/WidgetContainer'; +import ResourceUseSparklineGraph from './ResourceUseSparklineGraph'; function ResourceUse({ data, loading }) { + const [displayTable, setDisplayTable] = useState(false); + return ( + {displayTable && ( ({ ...d, heading: d.title || d.heading, link: d.heading }))} + data={data.resources.map((d) => ( + { ...d, heading: d.title || d.heading, link: d.heading }))} firstHeading="Resource URL" /> + )} + + {(!displayTable) && ()} + ); } diff --git a/frontend/src/widgets/ResourceUseSparkline.js b/frontend/src/widgets/ResourceUseSparkline.js new file mode 100644 index 0000000000..46d1a1712b --- /dev/null +++ b/frontend/src/widgets/ResourceUseSparkline.js @@ -0,0 +1,91 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import Plotly from 'plotly.js-basic-dist'; +import { DECIMAL_BASE } from '@ttahub/common'; +import createPlotlyComponent from 'react-plotly.js/factory'; +import colors from '../colors'; + +const Graph = createPlotlyComponent(Plotly); + +export default function ResourceUseSparkline({ dataPoints }) { + const titles = []; + const values = []; + + dataPoints.data.forEach((dataPoint) => { + if 
(dataPoint.title !== 'Total') { + titles.push(dataPoint.title); + values.push(parseInt(dataPoint.value, DECIMAL_BASE)); + } + }); + + const width = 68 * titles.length; + + // the color of the bar with the highest value is orange, the other are blue + const maxIndex = values.indexOf(Math.max(...values)); + // eslint-disable-next-line max-len + const color = values.map((_v, i) => (i === maxIndex ? colors.ttahubOrange : colors.ttahubMediumBlue)); + + const trace = { + type: 'bar', + x: titles, + y: values, + marker: { + color, + }, + hoverlabel: { + font: { color: '#ffffff', size: '16' }, + bgcolor: colors.textInk, + }, + }; + + const layout = { + bargap: 0.05, + height: 37.75, + width, + barcornerradius: 2, + margin: { + l: 0, + pad: 0, + t: 12, + b: 0, + r: 0, + }, + xaxis: { + visible: false, + }, + yaxis: { + visible: false, + }, + }; + + const config = { + responsive: true, + displayModeBar: false, + hovermode: 'none', + }; + + return ( +
+ +
+ ); +} + +ResourceUseSparkline.propTypes = { + dataPoints: PropTypes.shape({ + data: PropTypes.arrayOf(PropTypes.shape({ + title: PropTypes.string, + value: PropTypes.string, + })), + heading: PropTypes.string, + isUrl: PropTypes.bool, + sortBy: PropTypes.string, + title: PropTypes.string, + total: PropTypes.string, + url: PropTypes.string, + }).isRequired, +}; diff --git a/frontend/src/widgets/ResourceUseSparklineGraph.js b/frontend/src/widgets/ResourceUseSparklineGraph.js new file mode 100644 index 0000000000..4da6d01286 --- /dev/null +++ b/frontend/src/widgets/ResourceUseSparklineGraph.js @@ -0,0 +1,121 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import { uniqueId } from 'lodash'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import { faArrowUpRightFromSquare } from '@fortawesome/free-solid-svg-icons'; +import ResourceUseSparkline from './ResourceUseSparkline'; +import colors from '../colors'; +import './ResourceUseSparklineGraph.scss'; + +const ResourceLink = ({ url, title }) => { + if (!url) { + return title || ''; + } + + return ( + + ); +}; + +ResourceLink.propTypes = { + title: PropTypes.string, + url: PropTypes.string, +}; + +ResourceLink.defaultProps = { + title: '', + url: '', +}; + +function LegendIndicator({ color }) { + return ( +
+ ); +} + +LegendIndicator.propTypes = { + color: PropTypes.string.isRequired, +}; + +export default function ResourceUseSparklineGraph({ data }) { + const headings = data.headers.map((header) => {header}); + + return ( + <> +
+ + Activity reports citing resource + + Highest count during date range +
+
+
+ Resource URL +
+ {headings} +
+ + Total + +
+
+
+ {data.resources.map((resource) => ( + + ))} +
+ +
+ {data.resources.map((resource) => ( + + ))} +
+ + + {data.resources.map((resource) => ({resource.total}))} + + +
+
+ + ); +} + +ResourceUseSparklineGraph.propTypes = { + data: PropTypes.shape({ + headers: PropTypes.arrayOf(PropTypes.string), + resources: PropTypes.arrayOf(PropTypes.shape({ + data: PropTypes.arrayOf(PropTypes.shape({ + title: PropTypes.string, + value: PropTypes.string, + })), + heading: PropTypes.string, + isUrl: PropTypes.bool, + sortBy: PropTypes.string, + title: PropTypes.string, + total: PropTypes.string, + url: PropTypes.string, + })), + }), +}; + +ResourceUseSparklineGraph.defaultProps = { + data: { + headers: [], + resources: [], + }, +}; diff --git a/frontend/src/widgets/ResourceUseSparklineGraph.scss b/frontend/src/widgets/ResourceUseSparklineGraph.scss new file mode 100644 index 0000000000..e8c76223e4 --- /dev/null +++ b/frontend/src/widgets/ResourceUseSparklineGraph.scss @@ -0,0 +1,52 @@ +@use '../colors.scss' as colors; + +.ttahub-resource-use-sparkline--legend { + gap: .5em; +} +.ttahub-resource-use-sparkline--legend__item--blue { + background: colors.$ttahub-medium-blue; +} + +.ttahub-resource-use-sparkline--legend__item--orange { + background: colors.$ttahub-orange; +} + +.ttahub-resource-use-sparkline__heading-group, +.ttahub-resource-use-sparkline__graphs { + display: grid; + grid-template-columns: 200px 1fr 90px; + max-width: 100%; +} + + +@media(min-width: 800px) { + .ttahub-resource-use-sparkline__heading-group, + .ttahub-resource-use-sparkline__graphs { + grid-template-columns: 330px 1fr 90px; + } +} + +.ttahub-resource-use-sparkline__heading-group { + grid-template-rows: 3em; + align-items: center; +} + +span.ttahub-resource-use-sparkline__resource-heading { + border-right-color: colors.$text-ink; +} + +span.ttahub-resource-use-sparkline__resource-heading-total { + border-left-color: colors.$text-ink; +} + +.ttahub-resource-use-sparkline__graphs { + grid-template-rows: auto; +} + +.ttahub-resource-use-sparkline__heading { + width: 68px; +} + +.ttahub-resource-use-sparkline-resource-title { + max-width: 30ch; +} diff --git a/frontend/src/widgets/TopicFrequencyGraph.js b/frontend/src/widgets/TopicFrequencyGraph.js index c1bbc92268..695b9c4f4b 100644 --- a/frontend/src/widgets/TopicFrequencyGraph.js +++ b/frontend/src/widgets/TopicFrequencyGraph.js @@ -8,6 +8,7 @@ import AccessibleWidgetData from './AccessibleWidgetData'; import ButtonSelect from '../components/ButtonSelect'; import colors from '../colors'; import MediaCaptureButton from '../components/MediaCaptureButton'; +import DisplayTableToggle from '../components/DisplayTableToggleButton'; export const SORT_ORDER = { DESC: 1, @@ -150,11 +151,6 @@ export function TopicFrequencyGraphWidget({ setOrder(selected.value); }; - // toggle the data table - function toggleType() { - setShowAccessibleData(!showAccessibleData); - } - return ( @@ -199,16 +195,11 @@ export function TopicFrequencyGraphWidget({ /> ) : null} - + diff --git a/frontend/src/widgets/VBarGraph.js b/frontend/src/widgets/VBarGraph.js index 5d4dd3e903..352412ddcb 100644 --- a/frontend/src/widgets/VBarGraph.js +++ b/frontend/src/widgets/VBarGraph.js @@ -11,6 +11,7 @@ import MediaCaptureButton from '../components/MediaCaptureButton'; import WidgetH2 from '../components/WidgetH2'; import useSize from '../hooks/useSize'; import './VBarGraph.css'; +import DisplayTableToggle from '../components/DisplayTableToggleButton'; const Plot = createPlotlyComponent(Plotly); @@ -26,10 +27,6 @@ function VBarGraph({ const [plot, updatePlot] = useState({}); const bars = useRef(null); const [showAccessibleData, updateShowAccessibleData] = useState(false); - // toggle the 
data table - function toggleAccessibleData() { - updateShowAccessibleData((current) => !current); - } const size = useSize(bars); @@ -134,14 +131,11 @@ function VBarGraph({ /> ) : null} - +
diff --git a/frontend/src/widgets/__tests__/GoalStatusGraph.js b/frontend/src/widgets/__tests__/GoalStatusGraph.js index f2e42bbfcd..3455faca54 100644 --- a/frontend/src/widgets/__tests__/GoalStatusGraph.js +++ b/frontend/src/widgets/__tests__/GoalStatusGraph.js @@ -38,7 +38,7 @@ describe('GoalStatusChart', () => { it('switches to accessible data', async () => { renderGoalStatusChart(testData); - const button = await screen.findByRole('button', { name: /display goal statuses by number as a table/i }); + const button = await screen.findByRole('button', { name: /display goal statuses/i }); userEvent.click(button); expect(await screen.findByRole('columnheader', { name: /status/i })).toBeVisible(); }); diff --git a/frontend/src/widgets/__tests__/ResourceUse.js b/frontend/src/widgets/__tests__/ResourceUse.js index 83a704456f..3213fa8f46 100644 --- a/frontend/src/widgets/__tests__/ResourceUse.js +++ b/frontend/src/widgets/__tests__/ResourceUse.js @@ -1,6 +1,7 @@ import '@testing-library/jest-dom'; import React from 'react'; -import { render, screen } from '@testing-library/react'; +import { render, screen, act } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; import ResourceUse from '../ResourceUse'; const testData = { @@ -89,6 +90,12 @@ describe('Resource Use Widget', () => { const data = { headers: ['Jan-22', 'Feb-22', 'Mar-22'], resources: [] }; renderResourceUse(data); + const button = await screen.findByRole('button', { name: /Display Resource use as table/i }); + + act(() => { + userEvent.click(button); + }); + expect(screen.getByText(/Resource use/i)).toBeInTheDocument(); expect(screen.getByText(/Showing the 10 resources cited most often on Activity Reports/i)).toBeInTheDocument(); expect(screen.getByText(/Resource URL/i)).toBeInTheDocument(); @@ -101,6 +108,11 @@ describe('Resource Use Widget', () => { it('renders correctly with data', async () => { renderResourceUse(testData); + const button = await screen.findByRole('button', { name: /Display Resource use as table/i }); + act(() => { + userEvent.click(button); + }); + expect(screen.getByText(/Resource use/i)).toBeInTheDocument(); expect(screen.getByText(/Showing the 10 resources cited most often on Activity Reports/i)).toBeInTheDocument(); expect(screen.getByText(/Resource URL/i)).toBeInTheDocument(); diff --git a/frontend/src/widgets/__tests__/ResourceUseSparklineGraph.js b/frontend/src/widgets/__tests__/ResourceUseSparklineGraph.js new file mode 100644 index 0000000000..77c6eecefe --- /dev/null +++ b/frontend/src/widgets/__tests__/ResourceUseSparklineGraph.js @@ -0,0 +1,155 @@ +import '@testing-library/jest-dom'; +import React from 'react'; +import { render, screen, act } from '@testing-library/react'; +import ResourceUseSparklineGraph from '../ResourceUseSparklineGraph'; + +const testData = { + headers: ['Jan-22', 'Feb-22', 'Mar-22'], + resources: [ + { + heading: 'https://eclkc.ohs.acf.hhs.gov/school-readiness/effective-practice-guides/effective-practice-guides', + url: 'https://eclkc.ohs.acf.hhs.gov/school-readiness/effective-practice-guides/effective-practice-guides', + isUrl: true, + data: [ + { + title: 'Jan-22', + value: '17', + }, + { + title: 'Feb-22', + value: '18', + }, + { + title: 'Mar-22', + value: '19', + }, + { + title: 'total', + value: '20', + }, + ], + }, + { + heading: 'https://eclkc.ohs.acf.hhs.gov/school-readiness/effective-practice-guides/effective-practice-guides', + title: 'ECLKC Sample Title Test', + isUrl: true, + data: [ + { + title: 'Jan-22', + value: '17', + }, + { + 
title: 'Feb-22', + value: '18', + }, + { + title: 'Mar-22', + value: '19', + }, + { + title: 'total', + value: '20', + }, + ], + }, + { + heading: 'https://eclkc.ohs.acf.hhs.gov/school-readiness/effective-practice-guides/effective-practice-guides', + url: 'https://eclkc.ohs.acf.hhs.gov/school-readiness/effective-practice-guides/effective-practice-guides', + title: 'ECLKC Sample Title Test', + isUrl: true, + data: [ + { + title: 'Jan-22', + value: '17', + }, + { + title: 'Feb-22', + value: '18', + }, + { + title: 'Mar-22', + value: '19', + }, + { + title: 'total', + value: '20', + }, + ], + }, + { + heading: 'https://test1.gov', + url: 'https://test1.gov', + isUrl: true, + data: [ + { + title: 'Jan-22', + value: '21', + }, + { + title: 'Feb-22', + value: '22', + }, + { + title: 'Mar-22', + value: '23', + }, + { + title: 'total', + value: '24', + }, + ], + }, + { + heading: 'Non URL', + isUrl: false, + data: [ + { + title: 'Jan-22', + value: '25', + }, + { + title: 'Feb-22', + value: '26', + }, + { + title: 'Mar-22', + value: '27', + }, + { + title: 'total', + value: '28', + }, + ], + }, + ], +}; + +const renderResourceUseSparklineGraph = (data) => { + render( + , + ); +}; + +describe('ResourceUseSparklineGraph', () => { + it('renders correctly without data', async () => { + const data = { headers: ['Jan-22', 'Feb-22', 'Mar-22'], resources: [] }; + act(() => { + renderResourceUseSparklineGraph(data); + }); + + expect(screen.getByText(/Activity reports citing resource/i)).toBeInTheDocument(); + expect(screen.getByText(/Highest count during date range/i)).toBeInTheDocument(); + + expect(document.querySelector('svg')).toBe(null); + }); + + it('renders correctly with data', async () => { + expect(() => { + act(() => { + renderResourceUseSparklineGraph(testData); + }); + }).not.toThrow(); + }); +}); diff --git a/frontend/src/widgets/__tests__/TotalHrsAndRecipientGraph.js b/frontend/src/widgets/__tests__/TotalHrsAndRecipientGraph.js index 47863284e3..7420cb5c12 100644 --- a/frontend/src/widgets/__tests__/TotalHrsAndRecipientGraph.js +++ b/frontend/src/widgets/__tests__/TotalHrsAndRecipientGraph.js @@ -2,7 +2,12 @@ /* eslint-disable jest/no-disabled-tests */ import '@testing-library/jest-dom'; import React from 'react'; -import { fireEvent, render, screen } from '@testing-library/react'; +import { + fireEvent, + render, + screen, + act, +} from '@testing-library/react'; import { TotalHrsAndRecipientGraph, LegendControl } from '../TotalHrsAndRecipientGraph'; const TEST_DATA_MONTHS = [ @@ -43,61 +48,25 @@ const renderTotalHrsAndRecipientGraph = async (props) => ( describe('Total Hrs And Recipient Graph Widget', () => { it('shows the correct month data', async () => { - renderTotalHrsAndRecipientGraph({ data: TEST_DATA_MONTHS }); - - const graphTitle = screen.getByRole('heading', { name: /total tta hours/i }); - expect(graphTitle).toBeInTheDocument(); - expect(document.querySelector('svg')).toBeInTheDocument(); - - // Get Trace Nodes. - const nodes = document.querySelectorAll('.plot .scatterlayer'); - - // Verify Number of Traces. - expect(nodes[0].childNodes.length).toEqual(3); - - // Verify Number of 'Recipient Rec TTA' Trace Points. - // await expect(nodes[0].childNodes[0].childNodes[3].childNodes.length).toEqual(6); - - // Verify Number of 'Hours of Training' Trace Points. - expect(nodes[0].childNodes[0].childNodes[3].childNodes.length).toEqual(6); - - // Verify Number of 'Hours of Technical Assistance' Trace Points. 
- expect(nodes[0].childNodes[1].childNodes[3].childNodes.length).toEqual(6); - - // Verify Number of 'Hours of Both' Trace Points. - expect(nodes[0].childNodes[2].childNodes[3].childNodes.length).toEqual(6); + expect(() => { + act(() => { + renderTotalHrsAndRecipientGraph({ data: TEST_DATA_MONTHS }); + }); + }).not.toThrow(); + + const svgGraph = document.querySelector('.plot-container.plotly svg'); + expect(svgGraph).toBeInTheDocument(); }); it('shows the correct day data', async () => { - renderTotalHrsAndRecipientGraph({ data: TEST_DATA_DAYS }); - - const graphTitle = screen.getByRole('heading', { name: /total tta hours/i }); - expect(graphTitle).toBeInTheDocument(); - expect(document.querySelector('svg')).toBeInTheDocument(); - - // Get Trace Nodes. - const nodes = document.querySelectorAll('.plot .scatterlayer'); - - // Verify Number of Traces. - expect(nodes[0].childNodes.length).toEqual(3); - - // Verify Number of 'Recipient Rec TTA' Trace Points. - expect(nodes[0].childNodes[0].childNodes[3].childNodes.length).toEqual(4); - - // Verify Number of 'Hours of Training' Trace Points. - expect(nodes[0].childNodes[1].childNodes[3].childNodes.length).toEqual(4); - - // Verify Number of 'Hours of Technical Assistance' Trace Points. - expect(nodes[0].childNodes[2].childNodes[3].childNodes.length).toEqual(4); - - expect(document.querySelectorAll('.plot .scatterlayer .point').length).toBe(12); - const training = screen.getByRole('checkbox', { name: /training/i, hidden: true }); - - fireEvent.click(training); - expect(document.querySelectorAll('.plot .scatterlayer .point').length).toBe(8); - - fireEvent.click(training); - expect(document.querySelectorAll('.plot .scatterlayer .point').length).toBe(12); + expect(() => { + act(() => { + renderTotalHrsAndRecipientGraph({ data: TEST_DATA_DAYS }); + }); + }).not.toThrow(); + + const svgGraph = document.querySelector('.plot-container.plotly svg'); + expect(svgGraph).toBeInTheDocument(); }); it('handles undefined data', async () => { @@ -164,24 +133,12 @@ describe('Total Hrs And Recipient Graph Widget', () => { }, { name: 'Hours of Both', x: ['Sep-20', 'Oct-20', 'Nov-20', 'Dec-20', 'Jan-21', 'Feb-21', 'Mar-21', 'Apr-21', 'May-21', 'Jun-21', 'Jul-21', 'Aug-21', 'Sep-21', 'Oct-21', 'Nov-21', 'Dec-21', 'Jan-22', 'Feb-22', 'Mar-22', 'Apr-22', 'May-22', 'Jun-22', 'Jul-22', 'Aug-22', 'Sep-22'], y: [55, 134.5, 173, 137.5, 190, 248.8, 234.3, 230, 193.5, 187.5, 200.5, 202.5, 224.5, 299.5, 155, 206.5, 209.5, 251.5, 234, 206, 235.5, 245, 279.5, 274.5, 155.5], month: [false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false], }]; - renderTotalHrsAndRecipientGraph({ data: largeDataSet }); - - // confirm the number of labels - const labels = document.querySelectorAll('.xaxislayer-above .xtick text'); - expect(labels.length).toBe(5); - // confirm the label content - const labelText = Array.from(labels).map((label) => label.textContent); - expect(labelText).toEqual(['Sep-20', 'Mar-21', 'Sep-21', 'Mar-22', 'Sep-22']); + act(() => { + renderTotalHrsAndRecipientGraph({ data: largeDataSet }); + }); - // confirm the number of points - const traces = document.querySelectorAll('.plot .scatterlayer .trace.scatter'); - expect(traces.length).toBe(3); - - for (let i = 0; i < traces.length; i++) { - const trace = traces[i]; - const points = trace.querySelectorAll('.points path'); - expect(points.length).toBe(25); - } + const svgGraph = 
document.querySelector('.plot-container.plotly svg'); + expect(svgGraph).toBeInTheDocument(); }); }); diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 69b394ab60..3764afef1e 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -1854,7 +1854,7 @@ resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== -"@mapbox/geojson-rewind@^0.5.0": +"@mapbox/geojson-rewind@^0.5.2": version "0.5.2" resolved "https://registry.yarnpkg.com/@mapbox/geojson-rewind/-/geojson-rewind-0.5.2.tgz#591a5d71a9cd1da1a0bf3420b3bea31b0fc7946a" integrity sha512-tJaT+RbYGJYStt7wI3cq4Nl4SXxG8W7JDG5DMJu97V25RnbNg3QtQtf+KD+VLjNpWKYsRvXDNmNrBgEETr1ifA== @@ -2022,10 +2022,38 @@ d3-collection "1" d3-shape "^1.2.0" -"@plotly/d3@3.8.1": - version "3.8.1" - resolved "https://registry.yarnpkg.com/@plotly/d3/-/d3-3.8.1.tgz#674bf19809ffcc359e0ab388a1051f2dac5e6877" - integrity sha512-x49ThEu1FRA00kTso4Jdfyf2byaCPLBGmLjAYQz5OzaPyLUhHesX3/Nfv2OHEhynhdy2UB39DLXq6thYe2L2kg== +"@plotly/d3@3.8.2": + version "3.8.2" + resolved "https://registry.yarnpkg.com/@plotly/d3/-/d3-3.8.2.tgz#06a93a1dfc1377c1a441c24ddb156fc8da786f4a" + integrity sha512-wvsNmh1GYjyJfyEBPKJLTMzgf2c2bEbSIL50lmqVUi+o1NHaLPi1Lb4v7VxXXJn043BhNyrxUrWI85Q+zmjOVA== + +"@plotly/mapbox-gl@1.13.4": + version "1.13.4" + resolved "https://registry.yarnpkg.com/@plotly/mapbox-gl/-/mapbox-gl-1.13.4.tgz#cb854d70902dd02af753f728855152efe568524f" + integrity sha512-sR3/Pe5LqT/fhYgp4rT4aSFf1rTsxMbGiH6Hojc7PH36ny5Bn17iVFUjpzycafETURuFbLZUfjODO8LvSI+5zQ== + dependencies: + "@mapbox/geojson-rewind" "^0.5.2" + "@mapbox/geojson-types" "^1.0.2" + "@mapbox/jsonlint-lines-primitives" "^2.0.2" + "@mapbox/mapbox-gl-supported" "^1.5.0" + "@mapbox/point-geometry" "^0.1.0" + "@mapbox/tiny-sdf" "^1.1.1" + "@mapbox/unitbezier" "^0.0.0" + "@mapbox/vector-tile" "^1.3.1" + "@mapbox/whoots-js" "^3.1.0" + csscolorparser "~1.0.3" + earcut "^2.2.2" + geojson-vt "^3.2.1" + gl-matrix "^3.2.1" + grid-index "^1.1.0" + murmurhash-js "^1.0.0" + pbf "^3.2.1" + potpack "^1.0.1" + quickselect "^2.0.0" + rw "^1.3.3" + supercluster "^7.1.0" + tinyqueue "^2.0.3" + vt-pbf "^3.1.1" "@plotly/point-cluster@^3.1.9": version "3.1.9" @@ -4036,14 +4064,12 @@ color-normalize@^1.5.0: color-rgba "^2.2.0" dtype "^2.0.0" -color-parse@1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/color-parse/-/color-parse-1.3.8.tgz#eaf54cd385cb34c0681f18c218aca38478082fa3" - integrity sha512-1Y79qFv0n1xair3lNMTNeoFvmc3nirMVBij24zbs1f13+7fPpQClMg5b4AuKXLt3szj7BRlHMCXHplkce6XlmA== +color-parse@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/color-parse/-/color-parse-2.0.0.tgz#6bcf1f0f1fafffe68cacc2dde7a19b3a8c3d7bcd" + integrity sha512-g2Z+QnWsdHLppAbrpcFWo629kLOnOPtpxYV69GCqm92gqSgyXbzlfyN3MXs0412fPBkFmiuS+rXposgBgBa6Kg== dependencies: color-name "^1.0.0" - defined "^1.0.0" - is-plain-obj "^1.1.0" color-parse@^1.3.8, color-parse@^1.4.1, color-parse@^1.4.2: version "1.4.2" @@ -6273,10 +6299,10 @@ gl-matrix@^3.2.1: resolved "https://registry.yarnpkg.com/gl-matrix/-/gl-matrix-3.4.3.tgz#fc1191e8320009fd4d20e9339595c6041ddc22c9" integrity sha512-wcCp8vu8FT22BnvKVPjXa/ICBWRq/zjFfdofZy1WSpQZpphblv12/bOQLBC1rMM7SGOFS9ltVmKOHil5+Ml7gA== -gl-text@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/gl-text/-/gl-text-1.3.1.tgz#f36594464101b5b053178d6d219c3d08fb9144c8" - integrity 
sha512-/f5gcEMiZd+UTBJLTl3D+CkCB/0UFGTx3nflH8ZmyWcLkZhsZ1+Xx5YYkw2rgWAzgPeE35xCqBuHSoMKQVsR+w== +gl-text@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/gl-text/-/gl-text-1.4.0.tgz#223f89b2719f1dbea581368a66a0edf0def63174" + integrity sha512-o47+XBqLCj1efmuNyCHt7/UEJmB9l66ql7pnobD6p+sgmBUdzfMZXIF0zD2+KRfpd99DJN+QXdvTFAGCKCVSmQ== dependencies: bit-twiddle "^1.0.2" color-normalize "^1.5.0" @@ -6479,7 +6505,7 @@ glslify-deps@^1.2.5: map-limit "0.0.1" resolve "^1.0.0" -glslify@^7.0.0, glslify@^7.1.1: +glslify@^7.0.0: version "7.1.1" resolved "https://registry.yarnpkg.com/glslify/-/glslify-7.1.1.tgz#454d9172b410cb49864029c86d5613947fefd30b" integrity sha512-bud98CJ6kGZcP9Yxcsi7Iz647wuDz3oN+IZsjCRi5X1PI7t/xPKeL0mOwXJjo+CRZMqvq0CkSJiywCcY7kVYog== @@ -8227,35 +8253,6 @@ map-limit@0.0.1: dependencies: once "~1.3.0" -mapbox-gl@1.10.1: - version "1.10.1" - resolved "https://registry.yarnpkg.com/mapbox-gl/-/mapbox-gl-1.10.1.tgz#7dbd53bdf2f78e45e125c1115e94dea286ef663c" - integrity sha512-0aHt+lFUpYfvh0kMIqXqNXqoYMuhuAsMlw87TbhWrw78Tx2zfuPI0Lx31/YPUgJ+Ire0tzQ4JnuBL7acDNXmMg== - dependencies: - "@mapbox/geojson-rewind" "^0.5.0" - "@mapbox/geojson-types" "^1.0.2" - "@mapbox/jsonlint-lines-primitives" "^2.0.2" - "@mapbox/mapbox-gl-supported" "^1.5.0" - "@mapbox/point-geometry" "^0.1.0" - "@mapbox/tiny-sdf" "^1.1.1" - "@mapbox/unitbezier" "^0.0.0" - "@mapbox/vector-tile" "^1.3.1" - "@mapbox/whoots-js" "^3.1.0" - csscolorparser "~1.0.3" - earcut "^2.2.2" - geojson-vt "^3.2.1" - gl-matrix "^3.2.1" - grid-index "^1.1.0" - minimist "^1.2.5" - murmurhash-js "^1.0.0" - pbf "^3.2.1" - potpack "^1.0.1" - quickselect "^2.0.0" - rw "^1.3.3" - supercluster "^7.0.0" - tinyqueue "^2.0.3" - vt-pbf "^3.1.1" - matchmediaquery@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/matchmediaquery/-/matchmediaquery-0.3.1.tgz#8247edc47e499ebb7c58f62a9ff9ccf5b815c6d7" @@ -8998,26 +8995,28 @@ pkg-up@^3.1.0: dependencies: find-up "^3.0.0" -plotly.js-basic-dist@^2.2.1: - version "2.17.0" - resolved "https://registry.yarnpkg.com/plotly.js-basic-dist/-/plotly.js-basic-dist-2.17.0.tgz#ca4c5940fde3ad330dcc7f7066ede7be56e1cb82" - integrity sha512-jz41gJe3eCarA/bPGNxaYlbI4cMDmIZV9tqsXM2KGXuQkPrDE+hQLhJxj1/L/AzKEmaArsK3qqCbyPy7PUv/Cg== +plotly.js-basic-dist@^2.34.0: + version "2.34.0" + resolved "https://registry.yarnpkg.com/plotly.js-basic-dist/-/plotly.js-basic-dist-2.34.0.tgz#c41b0a0b02d8abbedf065c0b8df90bf5f6845f64" + integrity sha512-0c+/5JcEuNI8+JKXN6xx3Enmmy9ju9wL90VdRIPNObMloxksHjXuXEaXAwN80IIbaUyUlOf4F9pco2H76BRn4A== -plotly.js@^2.25.2: - version "2.25.2" - resolved "https://registry.yarnpkg.com/plotly.js/-/plotly.js-2.25.2.tgz#1d96eb95954f29db602e8197abc82fa28811c086" - integrity sha512-Pf6dPYGl21W7A3FTgLQ52fpgvrqGhCPDT3+612bxwg4QXlvxhnoFwvuhT1BRW/l2nbYGpRoUH79K54yf2vCMVQ== +plotly.js@^2.34.0: + version "2.34.0" + resolved "https://registry.yarnpkg.com/plotly.js/-/plotly.js-2.34.0.tgz#880d4dcede6b52baafacbc4d38dc182957306c37" + integrity sha512-dG2LC6wY6AUR1jsnriBi9xbigLPEEXXOHhLo97dRiZAWZVS6lZCmXXZ227U4rsoluXyfyqQezaKq7svolap8Dw== dependencies: - "@plotly/d3" "3.8.1" + "@plotly/d3" "3.8.2" "@plotly/d3-sankey" "0.7.2" "@plotly/d3-sankey-circular" "0.33.1" + "@plotly/mapbox-gl" "1.13.4" "@turf/area" "^6.4.0" "@turf/bbox" "^6.4.0" "@turf/centroid" "^6.0.2" + base64-arraybuffer "^1.0.2" canvas-fit "^1.5.0" color-alpha "1.0.4" color-normalize "1.5.0" - color-parse "1.3.8" + color-parse "2.0.0" color-rgba "2.1.1" country-regex "^1.1.0" d3-force "^1.2.1" @@ -9030,12 +9029,10 @@ plotly.js@^2.25.2: 
d3-time-format "^2.2.3" fast-isnumeric "^1.1.4" gl-mat4 "^1.2.0" - gl-text "^1.3.1" - glslify "^7.1.1" + gl-text "^1.4.0" has-hover "^1.0.1" has-passive-events "^1.0.0" is-mobile "^4.0.0" - mapbox-gl "1.10.1" mouse-change "^1.4.0" mouse-event-offset "^3.0.2" mouse-wheel "^1.2.0" @@ -9046,8 +9043,8 @@ plotly.js@^2.25.2: probe-image-size "^7.2.3" regl "npm:@plotly/regl@^2.1.2" regl-error2d "^2.0.12" - regl-line2d "^3.1.2" - regl-scatter2d "^3.2.9" + regl-line2d "^3.1.3" + regl-scatter2d "^3.3.1" regl-splom "^1.0.14" strongly-connected-components "^1.0.1" superscript-text "^1.0.0" @@ -10463,10 +10460,10 @@ regl-error2d@^2.0.12: to-float32 "^1.1.0" update-diff "^1.1.0" -regl-line2d@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/regl-line2d/-/regl-line2d-3.1.2.tgz#2bedef7f44c1f7fae75c90f9918258723ca84c1c" - integrity sha512-nmT7WWS/WxmXAQMkgaMKWXaVmwJ65KCrjbqHGOUjjqQi6shfT96YbBOvelXwO9hG7/hjvbzjtQ2UO0L3e7YaXQ== +regl-line2d@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/regl-line2d/-/regl-line2d-3.1.3.tgz#03669c676a9e3a06973d34c68ada2400792724a4" + integrity sha512-fkgzW+tTn4QUQLpFKsUIE0sgWdCmXAM3ctXcCgoGBZTSX5FE2A0M7aynz7nrZT5baaftLrk9te54B+MEq4QcSA== dependencies: array-bounds "^1.0.1" array-find-index "^1.0.2" @@ -10475,13 +10472,12 @@ regl-line2d@^3.1.2: earcut "^2.1.5" es6-weak-map "^2.0.3" flatten-vertex-data "^1.0.2" - glslify "^7.0.0" object-assign "^4.1.1" parse-rect "^1.2.0" pick-by-alias "^1.2.0" to-float32 "^1.1.0" -regl-scatter2d@^3.2.3, regl-scatter2d@^3.2.9: +regl-scatter2d@^3.2.3, regl-scatter2d@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/regl-scatter2d/-/regl-scatter2d-3.3.1.tgz#0956952901ab30743dbdfb4c67fd358075e9b939" integrity sha512-seOmMIVwaCwemSYz/y4WE0dbSO9svNFSqtTh5RE57I7PjGo3tcUYKtH0MTSoshcAsreoqN8HoCtnn8wfHXXfKQ== @@ -11317,7 +11313,7 @@ subscribe-ui-event@^2.0.6: lodash "^4.17.15" raf "^3.0.0" -supercluster@^7.0.0: +supercluster@^7.1.0: version "7.1.5" resolved "https://registry.yarnpkg.com/supercluster/-/supercluster-7.1.5.tgz#65a6ce4a037a972767740614c19051b64b8be5a3" integrity sha512-EulshI3pGUM66o6ZdH3ReiFcvHpM3vAigyK+vcxdjpJyEbIIrtbmBdY23mGgnI24uXiGFvrGq9Gkum/8U7vJWg== diff --git a/src/goalServices/reduceGoals.ts b/src/goalServices/reduceGoals.ts index a07db8b0df..a09a7a5436 100644 --- a/src/goalServices/reduceGoals.ts +++ b/src/goalServices/reduceGoals.ts @@ -422,9 +422,9 @@ export function reduceGoals( const objectivesReducer = forReport ? reduceObjectivesForActivityReport : reduceObjectives; const where = (g: IReducedGoal, currentValue: IGoalModelInstance) => (forReport - ? g.name === currentValue.dataValues.name - : g.name === currentValue.dataValues.name - && g.status === currentValue.dataValues.status); + ? (g.name || '').trim() === (currentValue.dataValues.name || '').trim() + : (g.name || '').trim() === (currentValue.dataValues.name || '').trim() + && g.status === currentValue.dataValues.status); function getGoalCollaboratorDetails( collabType: string, diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 745048828c..2e43fa113d 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -12,7 +12,7 @@ import { sequelize } from '../models'; * @param {Object} logContext - The logging context. * @returns {Promise} - The ID of the stored request error, or null if storing failed. 
*/ -async function logRequestError(req, operation, error, logContext) { +export async function logRequestError(req, operation, error, logContext) { // Check if error logging should be suppressed if ( operation !== 'SequelizeError' diff --git a/src/lib/apiErrorHandler.test.js b/src/lib/apiErrorHandler.test.js index 59506f6e7e..ffbdbef55e 100644 --- a/src/lib/apiErrorHandler.test.js +++ b/src/lib/apiErrorHandler.test.js @@ -1,7 +1,8 @@ import Sequelize from 'sequelize'; import { INTERNAL_SERVER_ERROR } from 'http-codes'; import db, { RequestErrors } from '../models'; -import handleErrors, { handleUnexpectedErrorInCatchBlock } from './apiErrorHandler'; +import handleErrors, { handleUnexpectedErrorInCatchBlock, logRequestError } from './apiErrorHandler'; +import { auditLogger } from '../logger'; const mockUser = { id: 47, @@ -78,3 +79,92 @@ describe('apiErrorHandler', () => { expect(requestErrors.length).toBe(0); }); }); + +describe('logRequestError suppression', () => { + beforeEach(() => { + process.env.SUPPRESS_ERROR_LOGGING = 'true'; + }); + + afterEach(() => { + delete process.env.SUPPRESS_ERROR_LOGGING; + }); + + it('should suppress error logging and return 0', async () => { + const mockError = new Error('Test error'); + const result = await logRequestError(mockRequest, 'TestOperation', mockError, mockLogContext); + expect(result).toBe(0); + }); + + it('should not suppress logging for SequelizeError regardless of SUPPRESS_ERROR_LOGGING', async () => { + const result = await logRequestError(mockRequest, 'SequelizeError', mockSequelizeError, mockLogContext); + expect(result).not.toBe(0); + }); +}); + +describe('logRequestError failure handling', () => { + beforeEach(() => { + jest.spyOn(auditLogger, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it('logs an error and returns null on failure to store RequestError', async () => { + // Simulate a failure by mocking createRequestError to throw an error + jest.spyOn(RequestErrors, 'create').mockRejectedValue(new Error('Database error')); + + const result = await logRequestError(mockRequest, 'TestOperation', new Error('Test error'), mockLogContext); + + expect(auditLogger.error).toHaveBeenCalledWith(expect.stringContaining('unable to store RequestError')); + expect(result).toBeNull(); + }); +}); + +describe('handleError development logging', () => { + beforeEach(() => { + process.env.NODE_ENV = 'development'; + jest.spyOn(auditLogger, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + delete process.env.NODE_ENV; + jest.restoreAllMocks(); + }); + + it('logs the error in development environment', async () => { + const mockError = new Error('Development error'); + await handleErrors(mockRequest, mockResponse, mockError, mockLogContext); + + expect(auditLogger.error).toHaveBeenCalledWith(mockError); + }); +}); + +describe('handleError Sequelize connection errors', () => { + beforeEach(() => { + jest.spyOn(auditLogger, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it('logs connection pool info on Sequelize.ConnectionError', async () => { + const mockConnectionError = new Sequelize.ConnectionError('Connection error'); + await handleErrors(mockRequest, mockResponse, mockConnectionError, mockLogContext); + + expect(auditLogger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool')); + }); + + it('logs connection pool info on Sequelize.ConnectionAcquireTimeoutError', async () => { + const 
mockConnectionAcquireTimeoutError = new Sequelize.ConnectionAcquireTimeoutError('Connection acquire timeout error'); + await handleErrors( + mockRequest, + mockResponse, + mockConnectionAcquireTimeoutError, + mockLogContext, + ); + + expect(auditLogger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool')); + }); +}); diff --git a/src/lib/lockManager.test.js b/src/lib/lockManager.test.js index ca0e92fa35..37def26b05 100644 --- a/src/lib/lockManager.test.js +++ b/src/lib/lockManager.test.js @@ -1,3 +1,4 @@ +import { auditLogger } from '../logger'; import LockManager from './lockManager'; jest.mock('ioredis', () => jest.requireActual('ioredis-mock')); @@ -12,9 +13,17 @@ describe('LockManager', () => { port: '6379', tlsEnabled: false, }; + let mockExit; + let mockAuditLoggerError; + let mockAuditLoggerInfo; beforeEach(() => { lockManager = new LockManager(lockKey, lockTTL, redisConfig); + mockAuditLoggerError = jest.spyOn(auditLogger, 'error').mockImplementation(); + mockAuditLoggerInfo = jest.spyOn(auditLogger, 'info').mockImplementation(); + mockExit = jest.fn(); + process.exit = mockExit; + jest.spyOn(lockManager, 'close').mockResolvedValue(); }); afterEach(async () => { @@ -22,6 +31,22 @@ describe('LockManager', () => { jest.clearAllMocks(); }); + describe('handleShutdown', () => { + it('should log error and exit with code 1 on error', async () => { + const testError = new Error('Test error'); + await lockManager.handleShutdown(testError); + expect(mockAuditLoggerError).toHaveBeenCalledWith(`An error occurred: ${testError}`); + expect(mockExit).toHaveBeenCalledWith(1); + }); + + it('should log info and exit with code 0 on signal', async () => { + const testSignal = 'SIGINT'; + await lockManager.handleShutdown(testSignal); + expect(mockAuditLoggerInfo).toHaveBeenCalledWith(`Received signal: ${testSignal}`); + expect(mockExit).toHaveBeenCalledWith(0); + }); + }); + describe('acquireLock', () => { it('should acquire a lock if it is not already taken', async () => { const acquired = await lockManager.acquireLock(); @@ -74,15 +99,41 @@ describe('LockManager', () => { }); }); - describe('startRenewal', () => { - it('should start the renewal process', async () => { - jest.useFakeTimers(); - await lockManager.acquireLock(); - await lockManager.startRenewal(); + describe('LockManager renewal process', () => { + let lockManager2; + const lockKey2 = 'testLock'; + const lockTTL2 = 2000; // 2 seconds for testing - jest.advanceTimersByTime(lockTTL / 2); - expect(setInterval).toHaveBeenCalled(); - jest.useRealTimers(); + beforeEach(async () => { + lockManager2 = new LockManager(lockKey2, lockTTL2); + jest.spyOn(lockManager2, 'renewHoldTTL').mockImplementation(async () => true); + jest.spyOn(lockManager2, 'stopRenewal').mockImplementation(async () => {}); + }); + + afterEach(async () => { + lockManager2.close(); + jest.restoreAllMocks(); + }); + + it('should log an error and stop renewal if renewing the lock fails', async () => { + jest.spyOn(lockManager2, 'renewHoldTTL').mockImplementationOnce(async () => false); + const stopRenewalSpy = jest.spyOn(lockManager2, 'stopRenewal'); + const auditLoggerErrorSpy = jest.spyOn(auditLogger, 'error'); + + await lockManager2.startRenewal(); + expect(auditLoggerErrorSpy).toHaveBeenCalledWith(expect.stringContaining('Failed to renew the lock for key "testLock". 
Another instance may take over.')); + expect(stopRenewalSpy).toHaveBeenCalled(); + }); + + it('should log an error and stop renewal on renewal error', async () => { + const error = new Error('Renewal error'); + jest.spyOn(lockManager2, 'renewHoldTTL').mockImplementationOnce(async () => { throw error; }); + const stopRenewalSpy = jest.spyOn(lockManager2, 'stopRenewal'); + const auditLoggerErrorSpy = jest.spyOn(auditLogger, 'error'); + + await lockManager2.startRenewal(); + expect(auditLoggerErrorSpy).toHaveBeenCalledWith(expect.stringContaining('An error occurred during renewal:')); + expect(stopRenewalSpy).toHaveBeenCalled(); }); }); @@ -97,4 +148,31 @@ describe('LockManager', () => { jest.useRealTimers(); }); }); + + describe('readLock error handling', () => { + it('should log error and rethrow when getting lock value fails', async () => { + jest.spyOn(lockManager.redis, 'get').mockRejectedValue(new Error('Redis error')); + await expect(lockManager.readLock()).rejects.toThrow('Redis error'); + expect(mockAuditLoggerError).toHaveBeenCalledWith(expect.stringContaining('Error getting value at key'), expect.any(Error)); + }); + }); + + describe('executeWithLock with holdLock', () => { + it('should start renewal with the lock TTL when holdLock is true', async () => { + const callback = jest.fn().mockResolvedValue(0); + const spyStartRenewal = jest.spyOn(lockManager, 'startRenewal'); + await lockManager.acquireLock(); + await lockManager.executeWithLock(callback, true); + expect(spyStartRenewal).toHaveBeenCalledWith(lockTTL); + }); + }); + + describe('close method error handling', () => { + it('should not log or rethrow if error message is "Connection is closed."', async () => { + const testError = new Error('Connection is closed.'); + jest.spyOn(lockManager.redis, 'disconnect').mockRejectedValue(testError); + await lockManager.close(); // Expect not to throw + expect(mockAuditLoggerError).not.toHaveBeenCalledWith(`LockManager.close: ${testError.message}`, testError); + }); + }); }); diff --git a/src/lib/maintenance/common.test.js b/src/lib/maintenance/common.test.js index d4623cc750..a65d50ceef 100644 --- a/src/lib/maintenance/common.test.js +++ b/src/lib/maintenance/common.test.js @@ -279,6 +279,51 @@ describe('Maintenance Queue', () => { const isSuccessful = await maintenanceCommand(callback, category, type, data, triggeredById); expect(isSuccessful).toBe(true); }); + + it('should default data to an empty object if not provided', async () => { + const cb = jest.fn().mockResolvedValue({ isSuccessful: true }); + const cast = 'test-category'; + const t = 'test-type'; + const id = 1; + await maintenanceCommand(cb, cast, t, undefined, id); + expect(MaintenanceLog.create).toHaveBeenCalledWith({ + category: cast, + type: t, + data: {}, // Verifies that data defaults to an empty object + triggeredById: id, + }); + }); + + it('should include messages and benchmarks in the update if they are not empty', async () => { + const cb = jest.fn().mockImplementation(async (logMessages, logBenchmarks) => { + logMessages.push('Log message'); + logBenchmarks.push('Log benchmark'); + throw new Error('Test error'); + }); + const cat = 'test-category'; + const t = 'test-type'; + const d = {}; + const id = 1; + await maintenanceCommand(cb, cat, t, d, id); + expect(MaintenanceLog.update).toHaveBeenCalledWith({ + data: expect.objectContaining({ + messages: ['Log message'], + benchmarks: ['Log benchmark'], + errorMessage: 'Test error', + }), + isSuccessful: false, + }, expect.anything()); + }); + + it('should default 
dateOffSet to 90 if not provided, using the correct olderThan date', async () => { + const d = {}; // No dateOffSet provided, triggering the default to 90 + const id = null; + await clearMaintenanceLogs(d, id); + // Since the exact date cannot be matched due to the dynamic nature of backDate(90), + // we focus on verifying that MaintenanceLog.destroy was called without throwing an error, + // which implies the default value was used successfully. + expect(MaintenanceLog.destroy).toHaveBeenCalled(); + }); }); describe('backDate', () => { diff --git a/src/lib/migration.test.js b/src/lib/migration.test.js index 70a30be640..85aa3f7782 100644 --- a/src/lib/migration.test.js +++ b/src/lib/migration.test.js @@ -7,6 +7,7 @@ const { updateUsersFlagsEnum, dropAndRecreateEnum, updateSequence, + addValuesToEnumIfTheyDontExist, } = require('./migration'); describe('migration', () => { @@ -323,4 +324,29 @@ describe('migration', () => { ); }); }); + + describe('addValuesToEnumIfTheyDontExist', () => { + const enumName = 'test_enum'; + const enumValues = ['value1', 'value2']; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should attempt to add each enum value if not exists', async () => { + await addValuesToEnumIfTheyDontExist(queryInterface, transaction, enumName, enumValues); + + // Verify that the query was called with the correct SQL for each enum value, + // trimming for any unintentional whitespace + enumValues.forEach((enumValue) => { + expect(queryInterface.sequelize.query).toHaveBeenCalledWith( + expect.stringContaining(`ALTER TYPE "${enumName}" ADD VALUE IF NOT EXISTS '${enumValue}';`.trim()), + { transaction }, + ); + }); + + // Verify the query was called the correct number of times for the enumValues provided + expect(queryInterface.sequelize.query).toHaveBeenCalledTimes(enumValues.length); + }); + }); }); diff --git a/src/lib/modelUtils.test.js b/src/lib/modelUtils.test.js index 51f117fd87..e4115c832c 100644 --- a/src/lib/modelUtils.test.js +++ b/src/lib/modelUtils.test.js @@ -198,5 +198,11 @@ describe('modelUtils', () => { posts: [{ id: 10, title: 'Post Title' }], }); }); + + it('returns data unmodified if it isn\'t an array or an object', () => { + const data = 'dog'; + const out = nestedRawish(data); + expect(out).toEqual('dog'); + }); }); }); diff --git a/src/lib/queue.js b/src/lib/queue.js index 0322f9cb2b..32279d471d 100644 --- a/src/lib/queue.js +++ b/src/lib/queue.js @@ -1,3 +1,4 @@ +/* eslint-disable max-len */ import Queue from 'bull'; import { auditLogger } from '../logger'; @@ -80,103 +81,103 @@ export async function increaseListeners(queue, num = 1) { } // Remove event handlers -function removeQueueEventHandlers( - queue, - errorListener, - shutdownListener, - exceptionListener, - rejectionListener, -) { - queue.removeListener('error', errorListener).catch((err) => auditLogger.error(err.message)); - process.removeListener('SIGINT', shutdownListener).catch((err) => auditLogger.error(err.message)); - process.removeListener('SIGTERM', shutdownListener).catch((err) => auditLogger.error(err.message)); - process.removeListener('uncaughtException', exceptionListener).catch((err) => auditLogger.error(err.message)); - process.removeListener('unhandledRejection', rejectionListener).catch((err) => auditLogger.error(err.message)); -} +// function removeQueueEventHandlers( +// queue, +// errorListener, +// shutdownListener, +// exceptionListener, +// rejectionListener, +// ) { +// queue.removeListener('error', errorListener).catch((err) => auditLogger.error(err.message)); +// 
process.removeListener('SIGINT', shutdownListener).catch((err) => auditLogger.error(err.message)); +// process.removeListener('SIGTERM', shutdownListener).catch((err) => auditLogger.error(err.message)); +// process.removeListener('uncaughtException', exceptionListener).catch((err) => auditLogger.error(err.message)); +// process.removeListener('unhandledRejection', rejectionListener).catch((err) => auditLogger.error(err.message)); +// } // Define the handlers so they can be added and removed -function handleShutdown(queue) { - return () => { - auditLogger.error('Shutting down, closing queue...'); - queue.close().then(() => { - auditLogger.error('Queue closed successfully.'); - removeQueueEventHandlers(queue); - process.exit(0); - }).catch((err) => { - auditLogger.error('Failed to close the queue:', err); - removeQueueEventHandlers(queue); - process.exit(1); - }); - }; -} - -function handleException(queue) { - return (err) => { - auditLogger.error('Uncaught exception:', err); - queue.close().then(() => { - auditLogger.error('Queue closed after uncaught exception.'); - removeQueueEventHandlers(queue); - process.exit(1); - }).catch((closeErr) => { - auditLogger.error('Failed to close the queue after uncaught exception:', closeErr); - removeQueueEventHandlers(queue); - process.exit(1); - }); - }; -} - -function handleRejection(queue) { - return (reason, promise) => { - auditLogger.error('Unhandled rejection at:', promise, 'reason:', reason); - queue.close().then(() => { - auditLogger.error('Queue closed after unhandled rejection.'); - removeQueueEventHandlers(queue); - process.exit(1); - }).catch((closeErr) => { - auditLogger.error('Failed to close the queue after unhandled rejection:', closeErr); - removeQueueEventHandlers(queue); - process.exit(1); - }); - }; -} +// function handleShutdown(queue) { +// return () => { +// auditLogger.error('Shutting down, closing queue...'); +// queue.close().then(() => { +// auditLogger.error('Queue closed successfully.'); +// removeQueueEventHandlers(queue); +// process.exit(0); +// }).catch((err) => { +// auditLogger.error('Failed to close the queue:', err); +// removeQueueEventHandlers(queue); +// process.exit(1); +// }); +// }; +// } + +// function handleException(queue) { +// return (err) => { +// auditLogger.error('Uncaught exception:', err); +// queue.close().then(() => { +// auditLogger.error('Queue closed after uncaught exception.'); +// removeQueueEventHandlers(queue); +// process.exit(1); +// }).catch((closeErr) => { +// auditLogger.error('Failed to close the queue after uncaught exception:', closeErr); +// removeQueueEventHandlers(queue); +// process.exit(1); +// }); +// }; +// } + +// function handleRejection(queue) { +// return (reason, promise) => { +// auditLogger.error('Unhandled rejection at:', promise, 'reason:', reason); +// queue.close().then(() => { +// auditLogger.error('Queue closed after unhandled rejection.'); +// removeQueueEventHandlers(queue); +// process.exit(1); +// }).catch((closeErr) => { +// auditLogger.error('Failed to close the queue after unhandled rejection:', closeErr); +// removeQueueEventHandlers(queue); +// process.exit(1); +// }); +// }; +// } // Setup event handlers -function setupQueueEventHandlers(queue) { - const shutdownListener = handleShutdown(queue); - const exceptionListener = handleException(queue); - const rejectionListener = handleRejection(queue); - - const errorListener = (err) => { - auditLogger.error('Queue encountered an error:', err); - queue.close().then(() => { - auditLogger.error('Queue closed 
due to an error.'); - removeQueueEventHandlers( - queue, - errorListener, - shutdownListener, - exceptionListener, - rejectionListener, - ); - }).catch((closeErr) => { - auditLogger.error('Failed to close the queue after an error:', closeErr); - removeQueueEventHandlers( - queue, - errorListener, - shutdownListener, - exceptionListener, - rejectionListener, - ); - }); - }; - - queue.on('error', errorListener); - process.on('SIGINT', shutdownListener); - process.on('SIGTERM', shutdownListener); - process.on('uncaughtException', exceptionListener); - process.on('unhandledRejection', rejectionListener); -} - -function setRedisConnectionName(queue, connectionName) { +// function setupQueueEventHandlers(queue) { +// const shutdownListener = handleShutdown(queue); +// const exceptionListener = handleException(queue); +// const rejectionListener = handleRejection(queue); + +// const errorListener = (err) => { +// auditLogger.error('Queue encountered an error:', err); +// queue.close().then(() => { +// auditLogger.error('Queue closed due to an error.'); +// removeQueueEventHandlers( +// queue, +// errorListener, +// shutdownListener, +// exceptionListener, +// rejectionListener, +// ); +// }).catch((closeErr) => { +// auditLogger.error('Failed to close the queue after an error:', closeErr); +// removeQueueEventHandlers( +// queue, +// errorListener, +// shutdownListener, +// exceptionListener, +// rejectionListener, +// ); +// }); +// }; + +// queue.on('error', errorListener); +// process.on('SIGINT', shutdownListener); +// process.on('SIGTERM', shutdownListener); +// process.on('uncaughtException', exceptionListener); +// process.on('unhandledRejection', rejectionListener); +// } + +export function setRedisConnectionName(queue, connectionName) { const { client } = queue; if (client && client.call) { client.call('client', 'setname', connectionName).catch((err) => { diff --git a/src/lib/queue.test.js b/src/lib/queue.test.js new file mode 100644 index 0000000000..648f855b60 --- /dev/null +++ b/src/lib/queue.test.js @@ -0,0 +1,133 @@ +import { auditLogger } from '../logger'; +import { generateRedisConfig, increaseListeners, setRedisConnectionName } from './queue'; + +describe('increaseListeners', () => { + const MAX_LISTENERS = 20; + let queue; + let redisClient; + + beforeEach(() => { + redisClient = { + getMaxListeners: jest.fn().mockReturnValue(10), + setMaxListeners: jest.fn(), + }; + queue = { + client: redisClient, + eventNames: jest.fn().mockReturnValue(['event1', 'event2']), + listenerCount: jest.fn().mockImplementation((eventName) => { + if (eventName === 'event1') return 5; + if (eventName === 'event2') return 3; + return 0; + }), + }; + }); + + it('increases max listeners if new total exceeds current max', async () => { + await increaseListeners(queue, 3); + expect(redisClient.setMaxListeners).toHaveBeenCalledWith(11); + }); + + it('does not change max listeners if new total does not exceed current max', async () => { + await increaseListeners(queue, 2); + expect(redisClient.setMaxListeners).not.toHaveBeenCalled(); + }); + + it('caps listener increase at MAX_LISTENERS constant', async () => { + await increaseListeners(queue, 15); + expect(redisClient.setMaxListeners).toHaveBeenCalledWith(MAX_LISTENERS); + }); + + it('does nothing if queue has no client', async () => { + queue.client = null; + await increaseListeners(queue, 1); + expect(redisClient.setMaxListeners).not.toHaveBeenCalled(); + }); +}); + +describe('generateRedisConfig with VCAP_SERVICES', () => { + const originalEnv = 
process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...originalEnv }; + }); + + afterAll(() => { + process.env = originalEnv; + }); + + it('returns TLS enabled redis settings without rate limiter', () => { + process.env.VCAP_SERVICES = JSON.stringify({ + 'aws-elasticache-redis': [{ + credentials: { + host: 'test-host', + port: '1234', + password: 'test-password', + uri: 'test-uri', + }, + }], + }); + + const config = generateRedisConfig(); + + expect(config).toEqual({ + uri: 'test-uri', + host: 'test-host', + port: '1234', + tlsEnabled: true, + redisOpts: { + redis: { + password: 'test-password', + tls: {}, + }, + }, + }); + }); + + it('returns TLS enabled redis settings with rate limiter', () => { + process.env.VCAP_SERVICES = JSON.stringify({ + 'aws-elasticache-redis': [{ + credentials: { + host: 'test-host', + port: '1234', + password: 'test-password', + uri: 'test-uri', + }, + }], + }); + process.env.REDIS_LIMITER_MAX = '2000'; + process.env.REDIS_LIMITER_DURATION = '600000'; + + const config = generateRedisConfig(true); + + expect(config).toEqual({ + uri: 'test-uri', + host: 'test-host', + port: '1234', + tlsEnabled: true, + redisOpts: { + redis: { + password: 'test-password', + tls: {}, + }, + limiter: { + max: '2000', + duration: '600000', + }, + }, + }); + }); +}); + +describe('setRedisConnectionName', () => { + it('logs an error if setting the Redis connection name fails', async () => { + const mockQueue = { + client: { + call: jest.fn().mockRejectedValue(new Error('Connection error')), + }, + }; + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + await setRedisConnectionName(mockQueue, 'testConnectionName'); + expect(auditLoggerSpy).toHaveBeenCalledWith('Failed to set Redis connection name:', expect.any(Error)); + }); +}); diff --git a/src/lib/resource.js b/src/lib/resource.js index b214653d2e..02f0df3726 100644 --- a/src/lib/resource.js +++ b/src/lib/resource.js @@ -1,442 +1,439 @@ -/* eslint-disable import/prefer-default-export */ -import httpCodes from 'http-codes'; -import axios from 'axios'; -import he from 'he'; -import { auditLogger, logger } from '../logger'; -import { Resource } from '../models'; - -const requestOptions = { - maxRedirects: 25, - responseEncoding: 'utf8', - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', -}; - -/** - * Filters an object by converting all string values to a supported character set. - * - * @param {object} obj - The object to filter. - * @returns {object} - The filtered object. - */ -const filterToSupportedCharacters = (obj) => JSON.parse(JSON.stringify(obj, (key, value) => { - if (typeof value === 'string') { - return Buffer.from(value, 'utf-8').toString(); - } - return value; -})); - -const commonAuthNames = [ - 'login', - 'signin', - 'authenticate', - 'unavailable', - 'auth', -]; - -/** - * Overrides the status code to UNAUTHORIZED if authentication is required. - * @param {number} statusCode - The original status code. - * @param {Array} list - The list of common authentication names. - * @param {string} data - The data to check for common authentication names. - * @param {boolean} filtered - Indicates whether the list is filtered or not. Default is false. - * @returns {number} - The updated status code. 
- */ -const overrideStatusCodeOnAuthRequired = (statusCode, list, data, filtered = false) => { - // Check if authentication is required based on the list and data - const requiresAuth = list - .filter((commonAuthName) => !(filtered || commonAuthName === 'auth')) - .some((commonAuthName) => data && data?.includes(commonAuthName)); - - // If authentication is required and the original status code is OK, return UNAUTHORIZED - if (statusCode === httpCodes.OK && requiresAuth) { - return httpCodes.UNAUTHORIZED; - } - - // Otherwise, return the original status code - return statusCode || httpCodes.SERVICE_UNAVAILABLE; -}; - -/** - * Retrieves the MIME type and status code of a resource from a given URL. - * - * @param {string} url - The URL of the resource. - * @returns {Promise} - An object containing the MIME type and status code. - */ -const getMimeType = async (url) => { - let mimeType; // Variable to store the MIME type of the resource. - let statusCode; // Variable to store the status code of the resource. - - try { - // Send a HEAD request to the URL and get the response. - const res = await axios.head(url, requestOptions); - mimeType = res.headers['content-type']; // Extract the MIME type from the response headers. - // Check if the URL was redirected and get the redirected URL if applicable. - const redirectedUrl = res?.request?.res?.responseUrl !== url && res?.request?.res?.responseUrl; - // Override the status code if authentication is required. - statusCode = overrideStatusCodeOnAuthRequired(res.status, commonAuthNames, redirectedUrl); - } catch (error) { - if (error.response && error.response.status) { - statusCode = error.response.status; // Get the status code from the error response. - mimeType = error.response.headers['content-type']; // Get the MIME type from the error response headers. - auditLogger.error( - `Resource Queue: Unable to retrieve header for Resource (URL: ${url}), received status code of ${statusCode}. Please make sure this is a valid address:`, - error, - ); // Log an error message with the URL and status code. - } else { - auditLogger.error('Error checking status:', error); // Log a generic error message. - } - } - - // Update URL in DB. - await Resource.update({ - ...(mimeType && { mimeType }), // Update the MIME type in the database if it exists. - // Update the status code in the database if it exists. - ...(statusCode && { lastStatusCode: statusCode }), - }, { - where: { url }, - individualHooks: true, - }); - - return { mimeType, statusCode }; // Return the MIME type and status code as an object. -}; - -/** - * Retrieves metadata values from a JSON resource. - * - * @param {string} url - The URL of the resource. - * @returns {Promise} - A promise that resolves to an object containing the metadata, - * status code, and MIME type of the resource. - */ -const getMetadataValuesFrommJson = async (url) => { - let result; - try { - // Attempt to get the resource metadata (if valid ECLKC resource). 
- // Sample: https://eclkc.ohs.acf.hhs.gov/mental-health/article/head-start-heals-campaign?_format=json - let metadataUrl; - - // Check if the URL already contains query parameters - if (url.includes('?')) { - metadataUrl = `${url}&_format=json`; - } else if (url.includes('#')) { // Check if the URL contains a fragment identifier - metadataUrl = `${url.split('#')[0]}?_format=json`; - } else { // Append query parameter to the URL - metadataUrl = `${url}?_format=json`; - } - const res = await axios.get(metadataUrl, requestOptions); - - result = { - // Check if the response data is a non-empty object and the content-type is JSON - metadata: (typeof res.data === 'object' && Object.keys(res.data)?.length !== 0 && res?.headers['content-type'] === 'application/json') - ? res.data - : null, - statusCode: overrideStatusCodeOnAuthRequired( - res.status, - commonAuthNames, - res?.request?.res?.responseUrl !== metadataUrl && res?.request?.res?.responseUrl, - true, - ), - mimeType: res.headers['content-type'], - }; - } catch (error) { - if (error.response) { - auditLogger.error( - `Resource Queue: Unable to collect metadata from json for Resource (URL: ${url}), received status code of ${error.response.status}. Please make sure this is a valid address:`, - error, - ); - result = { - metadata: null, - statusCode: error.response.status, - mimeType: error?.response?.headers['content-type'], - }; - } else { - auditLogger.error( - `Resource Queue: Unable to collect metadata from json for Resource (URL: ${url}). Please make sure this is a valid address:`, - error, - ); - throw error; - } - } - return result; -}; - -const metadataPatterns = [ - /<(title)[^>]*>([^<]+)<\/title>/gi, - // eslint-disable-next-line no-useless-escape - /<meta.*?name=[\"'](.*?)[\"'].*?content=[\"'](.*?)[\"'].*?\/?>/gi, -]; -/** - * Retrieves metadata values from an HTML page. - * - * @param {string} url - The URL of the HTML page. - * @returns {Promise} - An object containing the metadata, status code, and MIME type. - */ -const getMetadataValuesFromHtml = async (url) => { - let result; - try { - // Make a GET request to the specified URL - const res = await axios.get(url, requestOptions); - - // Extract metadata values from the HTML response - const metadata = metadataPatterns - .flatMap((pattern) => ((res && res.data && typeof res.data === 'string') - ? [...(res?.data?.matchAll(pattern) || null)] - : [])) - .reduce((acc, meta) => { - if (Array.isArray(meta)) { - const key = meta[1].toLowerCase(); - const value = meta[2].trim(); - if (value && value !== '') { - if (acc[key]) { - if (Array.isArray(acc[key])) { - acc[key].push(value); - } else { - acc[key] = [acc[key], value]; - } - } else { - acc[key] = value; - } - } - } - return acc; - }, {}); - - // Prepare the result object - result = { - metadata: (typeof metadata === 'object' && Object.keys(metadata).length !== 0) - ? metadata - : null, - statusCode: overrideStatusCodeOnAuthRequired( - res?.status, - commonAuthNames, - res?.request?.res?.responseUrl !== url && res?.request?.res?.responseUrl, - true, - ), - mimeType: res?.headers['content-type'], - }; - } catch (error) { - if (error.response) { - // Log an error message when unable to collect metadata due to a response error - auditLogger.error( - `Resource Queue: Unable to collect metadata from page scrape for Resource (URL: ${url}), received status code of ${error.response.status}. 
Please make sure this is a valid address:`, - error, - ); - result = { - statusCode: error.response.status, - mimeType: error?.response?.headers['content-type'], - }; - } else { - // Log an error message when unable to collect metadata due to an unexpected error - auditLogger.error( - `Resource Queue: Unable to collect metadata from page scrape for Resource (URL: ${url}). Please make sure this is a valid address:`, - error, - ); - throw error; - } - } - - return result; -}; - -/** - * Retrieves metadata values for a given URL. - * @param {string} url - The URL to retrieve metadata from. - * @returns {Promise<{ title: string | null, statusCode: number }>} - The title and status - * code of the resource. - */ -const getMetadataValues = async (url) => { - let statusCode; // Variable to store the status code of the resource. - let metadata; // Variable to store the metadata of the resource. - let title = null; // Variable to store the title of the resource, initialized as null. - let mimeType; // Variable to store the MIME type of the resource. - - try { - const fromJson = await getMetadataValuesFrommJson(url); - if (fromJson.statusCode === httpCodes.OK) { - // Destructure metadata and status code from JSON result. - const statuscodeFromJson = fromJson.statusCode; - const metadataFromJson = fromJson.metadata; - // filter out unsupported characters. - metadata = filterToSupportedCharacters(metadataFromJson); - statusCode = statuscodeFromJson; - } else { - const fromHtml = await getMetadataValuesFromHtml(url); - // Destructure metadata, status code, and MIME type from HTML result. - const { - metadata: metadataFromHtml, - statusCode: statuscodeFromHtml, - mimeType: mimeTypeFromHtml, - } = fromHtml.value; - // filter out unsupported characters. - metadata = filterToSupportedCharacters(metadataFromHtml); - statusCode = statuscodeFromHtml; - mimeType = mimeTypeFromHtml; - } - // If metadata is not empty, assign it to the variable, otherwise assign null. - metadata = (Object.keys(metadata).length !== 0 && metadata) || null; - - if (metadata) { - if (metadata.title) { - if (Array.isArray(metadata.title)) { - // If title is an array, assign the first value to the variable. - title = metadata.title[0].value; - } else { - title = metadata.title; // If title is not an array, assign it directly to the variable. - } - } else if (metadata['og:title']) { - title = metadata['og:title']; // If title is not available but 'og:title' exists in metadata, assign it to the variable. - } - // Decode URI component of the title, assign it to the variable, or assign null if undefined. - title = he.decode(decodeURIComponent(title)) || null; - title = title !== 'undefined' ? title : null; // Assign null to the variable if the title is 'undefined'. - } - } catch (error) { - auditLogger.error( - `Resource Queue: Unable to retrieving metadata for Resource (URL: ${url}). Please make sure this is a valid address:`, - error, - ); // Log an error message if there is an exception while retrieving metadata. - } - await Resource.update({ - ...(title && { title }), // Update the title field in the database if it exists. - // Update the metadata and metadataUpdatedAt fields in the database if they exist. - ...(metadata && { metadata, metadataUpdatedAt: new Date() }), - ...(mimeType && { mimeType }), // Update the mimeType field in the database if it exists. - // Update the lastStatusCode field in the database if it exists. 
- ...(statusCode && { lastStatusCode: statusCode }), - // Update the metadataUpdatedAt field in the database with the current date. - metadataUpdatedAt: new Date(), - }, { - where: { url }, // Specify the resource to update based on the URL. - individualHooks: true, // Enable individual hooks for the update operation. - }); - return { - title, - statusCode, - }; // Return the title and status code of the resource. -}; - -/** - * Retrieves metadata values and updates a resource with the scraped data. - * - * @param {string} url - The URL of the resource to scrape. - * @returns {Promise} - An object containing the scraped title and status code. - */ -const getPageScrapeValues = async (url) => { - let statusCode; // Variable to store the status code of the HTTP response - let metadata; // Variable to store the metadata of the HTML page - let mimeType; // Variable to store the MIME type of the HTML page - let title = null; // Variable to store the extracted title from the metadata - - try { - // Call the getMetadataValuesFromHtml function and destructure the returned values - ({ metadata, statusCode, mimeType } = await getMetadataValuesFromHtml(url)); - - if (metadata) { - if (metadata?.title) { - title = metadata.title; // Extract the title from the metadata if it exists - } - if (!title && metadata['og:title']) { - title = metadata['og:title']; // Extract the Open Graph title from the metadata if title is not found - } - // Decode and sanitize the title - title = (title && he.decode(decodeURIComponent(title))) || null; - title = title !== 'undefined' && title !== 'null' ? title : null; // Set title to null if it is undefined - } - } catch (error) { - auditLogger.error( - `Resource Queue: Unable to page scrape for Resource (URL: ${url}). Please make sure this is a valid address:`, - error, - ); - throw error; // Rethrow the error to be handled by the caller - } - - // Update the resource with the scraped data - await Resource.update({ - ...(title && { title }), // Update the title if it exists - // Update the metadata and metadataUpdatedAt if they exist - ...(metadata && { metadata, metadataUpdatedAt: new Date() }), - ...(mimeType && { mimeType }), // Update the MIME type if it exists - ...(statusCode && { lastStatusCode: statusCode }), // Update the last status code if it exists - }, { - where: { url }, - individualHooks: true, - }); - - return { title, statusCode }; // Return the scraped title and status code -}; - -export const unparsableMimeTypes = [ - 'application/octet-stream', - 'application/pdf', - 'application/pdf;charset=utf-8', - 'application/vnd.ms-powerpoint', - 'application/vnd.openxmlformats-officedocument.presentationml.presentation', - 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - 'audio/mpeg', - 'image/jpeg', - 'image/png', - 'video/mp4', -]; - -/** - * Retrieves metadata for a given resource. - * - * @param {object} job - The job object containing the resourceId and resourceUrl. - * @returns {Promise} - The status and data of the retrieved resource metadata. - */ -const getResourceMetaDataJob = async (job) => { - const { - resourceId, resourceUrl, - } = job.data; - - try { - // Determine if this is an ECLKC resource. - const isEclkc = resourceUrl.includes('eclkc.ohs.acf.hhs.gov'); - - let statusCode; - let mimeType; - let title = null; - - // Get the MIME type and status code of the resource. - // eslint-disable-next-line prefer-const - ({ mimeType, statusCode } = await getMimeType(resourceUrl)); - - // Check if the MIME type is unparsable. 
- if (mimeType && unparsableMimeTypes.includes(mimeType.toLowerCase().replace(/\s+/g, ''))) { - auditLogger.error(`Resource Queue: Warning, unable to process resource '${resourceUrl}', received status code '${statusCode}'.`); - return { - status: httpCodes.NO_CONTENT, - data: { url: resourceUrl }, - }; - } - - // Check if the status code indicates an error. - if (statusCode !== httpCodes.OK) { - auditLogger.error(`Resource Queue: Warning, unable to retrieve resource '${resourceUrl}', received status code '${statusCode}'.`); - return { status: statusCode || 500, data: { url: resourceUrl } }; - } - - // If it is an ECLKC resource, get the metadata values. - if (isEclkc) { - ({ title, statusCode } = await getMetadataValues(resourceUrl)); - if (statusCode !== httpCodes.OK) { - auditLogger.error(`Resource Queue: Warning, unable to retrieve metadata or resource TITLE for resource '${resourceUrl}', received status code '${statusCode || 500}'.`); - return { status: statusCode || 500, data: { url: resourceUrl } }; - } - } else { - // If it is not an ECLKC resource, scrape the page title. - ({ title, statusCode } = await getPageScrapeValues(resourceUrl)); - if (statusCode !== httpCodes.OK) { - auditLogger.error(`Resource Queue: Warning, unable to retrieve resource TITLE for resource '${resourceUrl}', received status code '${statusCode || 500}'.`); - return { status: statusCode || 500, data: { url: resourceUrl } }; - } - } - logger.info(`Resource Queue: Successfully retrieved resource metadata for resource '${resourceUrl}'`); - return { status: httpCodes.OK, data: { url: resourceUrl } }; - } catch (error) { - auditLogger.error(`Resource Queue: Unable to retrieve metadata or title for Resource (ID: ${resourceId} URL: ${resourceUrl}), please make sure this is a valid address:`, error); - return { status: httpCodes.NOT_FOUND, data: { url: resourceUrl } }; - } -}; - -export { - getResourceMetaDataJob, -}; +/* eslint-disable import/prefer-default-export */ +import httpCodes from 'http-codes'; +import axios from 'axios'; +import he from 'he'; +import { auditLogger, logger } from '../logger'; +import { Resource } from '../models'; + +const requestOptions = { + maxRedirects: 25, + responseEncoding: 'utf8', + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', +}; + +/** + * Filters an object by converting all string values to a supported character set. + * + * @param {object} obj - The object to filter. + * @returns {object} - The filtered object. + */ +const filterToSupportedCharacters = (obj) => JSON.parse(JSON.stringify(obj, (key, value) => { + if (typeof value === 'string') { + return Buffer.from(value, 'utf-8').toString(); + } + return value; +})); + +const commonAuthNames = [ + 'login', + 'signin', + 'authenticate', + 'unavailable', + 'auth', +]; + +/** + * Overrides the status code to UNAUTHORIZED if authentication is required. + * @param {number} statusCode - The original status code. + * @param {Array} list - The list of common authentication names. + * @param {string} data - The data to check for common authentication names. + * @param {boolean} filtered - Indicates whether the list is filtered or not. Default is false. + * @returns {number} - The updated status code. + */ +export const overrideStatusCodeOnAuthRequired = (statusCode, list, data, filtered = false) => { + const requiresAuth = list + .filter((commonAuthName) => (filtered ? 
commonAuthName !== 'auth' : true)) + .some((commonAuthName) => data && data.includes(commonAuthName)); + + if (statusCode === httpCodes.OK && requiresAuth) { + return httpCodes.UNAUTHORIZED; + } + + return statusCode || httpCodes.SERVICE_UNAVAILABLE; +}; + +/** + * Retrieves the MIME type and status code of a resource from a given URL. + * + * @param {string} url - The URL of the resource. + * @returns {Promise} - An object containing the MIME type and status code. + */ +const getMimeType = async (url) => { + let mimeType; // Variable to store the MIME type of the resource. + let statusCode; // Variable to store the status code of the resource. + + try { + // Send a HEAD request to the URL and get the response. + const res = await axios.head(url, requestOptions); + mimeType = res.headers['content-type']; // Extract the MIME type from the response headers. + // Check if the URL was redirected and get the redirected URL if applicable. + const redirectedUrl = res?.request?.res?.responseUrl !== url && res?.request?.res?.responseUrl; + // Override the status code if authentication is required. + statusCode = overrideStatusCodeOnAuthRequired(res.status, commonAuthNames, redirectedUrl); + } catch (error) { + if (error.response && error.response.status) { + statusCode = error.response.status; // Get the status code from the error response. + mimeType = error.response.headers['content-type']; // Get the MIME type from the error response headers. + auditLogger.error( + `Resource Queue: Unable to retrieve header for Resource (URL: ${url}), received status code of ${statusCode}. Please make sure this is a valid address:`, + error, + ); // Log an error message with the URL and status code. + } else { + auditLogger.error('Error checking status:', error); // Log a generic error message. + } + } + + // Update URL in DB. + await Resource.update({ + ...(mimeType && { mimeType }), // Update the MIME type in the database if it exists. + // Update the status code in the database if it exists. + ...(statusCode && { lastStatusCode: statusCode }), + }, { + where: { url }, + individualHooks: true, + }); + + return { mimeType, statusCode }; // Return the MIME type and status code as an object. +}; + +/** + * Retrieves metadata values from a JSON resource. + * + * @param {string} url - The URL of the resource. + * @returns {Promise} - A promise that resolves to an object containing the metadata, + * status code, and MIME type of the resource. + */ +const getMetadataValuesFrommJson = async (url) => { + let result; + try { + // Attempt to get the resource metadata (if valid ECLKC resource). + // Sample: https://eclkc.ohs.acf.hhs.gov/mental-health/article/head-start-heals-campaign?_format=json + let metadataUrl; + + // Check if the URL already contains query parameters + if (url.includes('?')) { + metadataUrl = `${url}&_format=json`; + } else if (url.includes('#')) { // Check if the URL contains a fragment identifier + metadataUrl = `${url.split('#')[0]}?_format=json`; + } else { // Append query parameter to the URL + metadataUrl = `${url}?_format=json`; + } + const res = await axios.get(metadataUrl, requestOptions); + + result = { + // Check if the response data is a non-empty object and the content-type is JSON + metadata: (typeof res.data === 'object' && Object.keys(res.data)?.length !== 0 && res?.headers['content-type'] === 'application/json') + ? 
+        ? res.data
+        : null,
+      statusCode: overrideStatusCodeOnAuthRequired(
+        res.status,
+        commonAuthNames,
+        res?.request?.res?.responseUrl !== metadataUrl && res?.request?.res?.responseUrl,
+        true,
+      ),
+      mimeType: res.headers['content-type'],
+    };
+  } catch (error) {
+    if (error.response) {
+      auditLogger.error(
+        `Resource Queue: Unable to collect metadata from json for Resource (URL: ${url}), received status code of ${error.response.status}. Please make sure this is a valid address:`,
+        error,
+      );
+      result = {
+        metadata: null,
+        statusCode: error.response.status,
+        mimeType: error?.response?.headers['content-type'],
+      };
+    } else {
+      auditLogger.error(
+        `Resource Queue: Unable to collect metadata from json for Resource (URL: ${url}). Please make sure this is a valid address:`,
+        error,
+      );
+      throw error;
+    }
+  }
+  return result;
+};
+
+const metadataPatterns = [
+  /<(title)[^>]*>([^<]+)<\/title>/gi,
+  // eslint-disable-next-line no-useless-escape
+  /<meta[^>]*(?:name|property)=["']([^"']+)["'][^>]*content=["']([^"']+)["'][^>]*>/gi,
+];
+/**
+ * Retrieves metadata values from an HTML page.
+ *
+ * @param {string} url - The URL of the HTML page.
+ * @returns {Promise} - An object containing the metadata, status code, and MIME type.
+ */
+const getMetadataValuesFromHtml = async (url) => {
+  let result;
+  try {
+    // Make a GET request to the specified URL
+    const res = await axios.get(url, requestOptions);
+
+    // Extract metadata values from the HTML response
+    const metadata = metadataPatterns
+      .flatMap((pattern) => ((res && res.data && typeof res.data === 'string')
+        ? [...(res?.data?.matchAll(pattern) || null)]
+        : []))
+      .reduce((acc, meta) => {
+        if (Array.isArray(meta)) {
+          const key = meta[1].toLowerCase();
+          const value = meta[2].trim();
+          if (value && value !== '') {
+            if (acc[key]) {
+              if (Array.isArray(acc[key])) {
+                acc[key].push(value);
+              } else {
+                acc[key] = [acc[key], value];
+              }
+            } else {
+              acc[key] = value;
+            }
+          }
+        }
+        return acc;
+      }, {});
+
+    // Prepare the result object
+    result = {
+      metadata: (typeof metadata === 'object' && Object.keys(metadata).length !== 0)
+        ? metadata
+        : null,
+      statusCode: overrideStatusCodeOnAuthRequired(
+        res?.status,
+        commonAuthNames,
+        res?.request?.res?.responseUrl !== url && res?.request?.res?.responseUrl,
+        true,
+      ),
+      mimeType: res?.headers['content-type'],
+    };
+  } catch (error) {
+    if (error.response) {
+      // Log an error message when unable to collect metadata due to a response error
+      auditLogger.error(
+        `Resource Queue: Unable to collect metadata from page scrape for Resource (URL: ${url}), received status code of ${error.response.status}. Please make sure this is a valid address:`,
+        error,
+      );
+      result = {
+        statusCode: error.response.status,
+        mimeType: error?.response?.headers['content-type'],
+      };
+    } else {
+      // Log an error message when unable to collect metadata due to an unexpected error
+      auditLogger.error(
+        `Resource Queue: Unable to collect metadata from page scrape for Resource (URL: ${url}). Please make sure this is a valid address:`,
+        error,
+      );
+      throw error;
+    }
+  }
+
+  return result;
+};
+
+/**
+ * Retrieves metadata values for a given URL.
+ * @param {string} url - The URL to retrieve metadata from.
+ * @returns {Promise<{ title: string | null, statusCode: number }>} - The title and status
+ * code of the resource.
+ */
+const getMetadataValues = async (url) => {
+  let statusCode; // Variable to store the status code of the resource.
+  let metadata; // Variable to store the metadata of the resource.
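+  // Lookup order (see the try block below): hit the ECLKC JSON endpoint first,
+  // then fall back to scraping the HTML <title>/<meta> tags when that fails.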
+  let title = null; // Variable to store the title of the resource, initialized as null.
+  let mimeType; // Variable to store the MIME type of the resource.
+
+  try {
+    const fromJson = await getMetadataValuesFromJson(url);
+    if (fromJson.statusCode === httpCodes.OK) {
+      // Pull the metadata and status code from the JSON result.
+      const statusCodeFromJson = fromJson.statusCode;
+      const metadataFromJson = fromJson.metadata;
+      // filter out unsupported characters.
+      metadata = filterToSupportedCharacters(metadataFromJson);
+      statusCode = statusCodeFromJson;
+    } else {
+      const fromHtml = await getMetadataValuesFromHtml(url);
+      // Destructure metadata, status code, and MIME type from HTML result.
+      const {
+        metadata: metadataFromHtml,
+        statusCode: statusCodeFromHtml,
+        mimeType: mimeTypeFromHtml,
+      } = fromHtml.value;
+      // filter out unsupported characters.
+      metadata = filterToSupportedCharacters(metadataFromHtml);
+      statusCode = statusCodeFromHtml;
+      mimeType = mimeTypeFromHtml;
+    }
+    // If metadata is not empty, assign it to the variable, otherwise assign null.
+    metadata = (Object.keys(metadata).length !== 0 && metadata) || null;
+
+    if (metadata) {
+      if (metadata.title) {
+        if (Array.isArray(metadata.title)) {
+          // If title is an array, assign the first value to the variable.
+          title = metadata.title[0].value;
+        } else {
+          title = metadata.title; // If title is not an array, assign it directly to the variable.
+        }
+      } else if (metadata['og:title']) {
+        title = metadata['og:title']; // If title is not available but 'og:title' exists in metadata, assign it to the variable.
+      }
+      // Decode URI component of the title, assign it to the variable, or assign null if undefined.
+      title = he.decode(decodeURIComponent(title)) || null;
+      title = title !== 'undefined' ? title : null; // Assign null to the variable if the title is 'undefined'.
+    }
+  } catch (error) {
+    auditLogger.error(
+      `Resource Queue: Unable to retrieve metadata for Resource (URL: ${url}). Please make sure this is a valid address:`,
+      error,
+    ); // Log an error message if there is an exception while retrieving metadata.
+  }
+  await Resource.update({
+    ...(title && { title }), // Update the title field in the database if it exists.
+    // Update the metadata and metadataUpdatedAt fields in the database if they exist.
+    ...(metadata && { metadata, metadataUpdatedAt: new Date() }),
+    ...(mimeType && { mimeType }), // Update the mimeType field in the database if it exists.
+    // Update the lastStatusCode field in the database if it exists.
+    ...(statusCode && { lastStatusCode: statusCode }),
+    // Update the metadataUpdatedAt field in the database with the current date.
+    metadataUpdatedAt: new Date(),
+  }, {
+    where: { url }, // Specify the resource to update based on the URL.
+    individualHooks: true, // Enable individual hooks for the update operation.
+  });
+  return {
+    title,
+    statusCode,
+  }; // Return the title and status code of the resource.
+};
+
+/**
+ * Retrieves metadata values and updates a resource with the scraped data.
+ *
+ * @param {string} url - The URL of the resource to scrape.
+ * @returns {Promise} - An object containing the scraped title and status code.
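+ * @example
+ * // Illustrative sketch only; 'https://example.gov/article' is a made-up URL:
+ * // const { title, statusCode } = await getPageScrapeValues('https://example.gov/article');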
+ */ +const getPageScrapeValues = async (url) => { + let statusCode; // Variable to store the status code of the HTTP response + let metadata; // Variable to store the metadata of the HTML page + let mimeType; // Variable to store the MIME type of the HTML page + let title = null; // Variable to store the extracted title from the metadata + + try { + // Call the getMetadataValuesFromHtml function and destructure the returned values + ({ metadata, statusCode, mimeType } = await getMetadataValuesFromHtml(url)); + + if (metadata) { + if (metadata?.title) { + title = metadata.title; // Extract the title from the metadata if it exists + } + if (!title && metadata['og:title']) { + title = metadata['og:title']; // Extract the Open Graph title from the metadata if title is not found + } + // Decode and sanitize the title + title = (title && he.decode(decodeURIComponent(title))) || null; + title = title !== 'undefined' && title !== 'null' ? title : null; // Set title to null if it is undefined + } + } catch (error) { + auditLogger.error( + `Resource Queue: Unable to page scrape for Resource (URL: ${url}). Please make sure this is a valid address:`, + error, + ); + throw error; // Rethrow the error to be handled by the caller + } + + // Update the resource with the scraped data + await Resource.update({ + ...(title && { title }), // Update the title if it exists + // Update the metadata and metadataUpdatedAt if they exist + ...(metadata && { metadata, metadataUpdatedAt: new Date() }), + ...(mimeType && { mimeType }), // Update the MIME type if it exists + ...(statusCode && { lastStatusCode: statusCode }), // Update the last status code if it exists + }, { + where: { url }, + individualHooks: true, + }); + + return { title, statusCode }; // Return the scraped title and status code +}; + +export const unparsableMimeTypes = [ + 'application/octet-stream', + 'application/pdf', + 'application/pdf;charset=utf-8', + 'application/vnd.ms-powerpoint', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'audio/mpeg', + 'image/jpeg', + 'image/png', + 'video/mp4', +]; + +/** + * Retrieves metadata for a given resource. + * + * @param {object} job - The job object containing the resourceId and resourceUrl. + * @returns {Promise} - The status and data of the retrieved resource metadata. + */ +const getResourceMetaDataJob = async (job) => { + const { + resourceId, resourceUrl, + } = job.data; + + try { + // Determine if this is an ECLKC resource. + const isEclkc = resourceUrl.includes('eclkc.ohs.acf.hhs.gov'); + + let statusCode; + let mimeType; + let title = null; + + // Get the MIME type and status code of the resource. + // eslint-disable-next-line prefer-const + ({ mimeType, statusCode } = await getMimeType(resourceUrl)); + + // Check if the MIME type is unparsable. + if (mimeType && unparsableMimeTypes.includes(mimeType.toLowerCase().replace(/\s+/g, ''))) { + auditLogger.error(`Resource Queue: Warning, unable to process resource '${resourceUrl}', received status code '${statusCode}'.`); + return { + status: httpCodes.NO_CONTENT, + data: { url: resourceUrl }, + }; + } + + // Check if the status code indicates an error. 
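+    // (getMimeType reads the code off error responses too; a falsy status
+    // code is reported as 500 in the return below.)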
+    if (statusCode !== httpCodes.OK) {
+      auditLogger.error(`Resource Queue: Warning, unable to retrieve resource '${resourceUrl}', received status code '${statusCode}'.`);
+      return { status: statusCode || 500, data: { url: resourceUrl } };
+    }
+
+    // If it is an ECLKC resource, get the metadata values.
+    if (isEclkc) {
+      ({ title, statusCode } = await getMetadataValues(resourceUrl));
+      if (statusCode !== httpCodes.OK) {
+        auditLogger.error(`Resource Queue: Warning, unable to retrieve metadata or resource TITLE for resource '${resourceUrl}', received status code '${statusCode || 500}'.`);
+        return { status: statusCode || 500, data: { url: resourceUrl } };
+      }
+    } else {
+      // If it is not an ECLKC resource, scrape the page title.
+      ({ title, statusCode } = await getPageScrapeValues(resourceUrl));
+      if (statusCode !== httpCodes.OK) {
+        auditLogger.error(`Resource Queue: Warning, unable to retrieve resource TITLE for resource '${resourceUrl}', received status code '${statusCode || 500}'.`);
+        return { status: statusCode || 500, data: { url: resourceUrl } };
+      }
+    }
+    logger.info(`Resource Queue: Successfully retrieved resource metadata for resource '${resourceUrl}'`);
+    return { status: httpCodes.OK, data: { url: resourceUrl } };
+  } catch (error) {
+    auditLogger.error(`Resource Queue: Unable to retrieve metadata or title for Resource (ID: ${resourceId} URL: ${resourceUrl}), please make sure this is a valid address:`, error);
+    return { status: httpCodes.NOT_FOUND, data: { url: resourceUrl } };
+  }
+};
+
+export {
+  getResourceMetaDataJob,
+};
diff --git a/src/lib/resource.test.js b/src/lib/resource.test.js
index db12f0924d..c48c4c4b09 100644
--- a/src/lib/resource.test.js
+++ b/src/lib/resource.test.js
@@ -1,568 +1,588 @@
-/* eslint-disable no-useless-escape */
-import axios from 'axios';
-import { expect } from '@playwright/test';
-import { auditLogger } from '../logger';
-import { getResourceMetaDataJob } from './resource';
-import db, { Resource } from '../models';
-
-jest.mock('../logger');
-jest.mock('bull');
-
-const urlReturn = `
-<html lang="en">
-<head>
-<meta name="language" content="en">
-<meta name="topic" content="Mental Health">
-<meta name="resource-type" content="Article">
-<meta name="national-centers" content="Health, Behavioral Health, and Safety">
-<meta name="node-id" content="7858">
-<meta name="exclude-from-dynamic-view" content="False">
-<title>Head Start | ECLKC</title>
-</head>
-<body>
-test
-</body>
-</html>
-`;
-
-const urlMissingTitle = `
-<html lang="en">
-<head>
-</head>
-<body>
-test
-</body>
-</html>
-`;
-
-const metadata = {
-  created: [{ value: '2020-04-21T15:20:23+00:00' }],
-  changed: [{ value: '2023-05-26T18:57:15+00:00' }],
-  title: [{ value: 'Head Start Heals Campaign' }],
-  field_taxonomy_national_centers: [{ target_type: 'taxonomy_term' }],
-  field_taxonomy_topic: [{ target_type: 'taxonomy_term' }],
-  langcode: [{ value: 'en' }],
-  field_context: [{ value: '

\"Two' }], -}; - -const mockAxios = jest.spyOn(axios, 'get').mockImplementation(() => Promise.resolve()); -const mockAxiosHead = jest.spyOn(axios, 'head').mockImplementation(() => Promise.resolve()); -const axiosCleanMimeResponse = { status: 200, headers: { 'content-type': 'text/html; charset=utf-8' } }; -const axiosCleanResponse = { ...axiosCleanMimeResponse, data: urlReturn }; -const axiosNoTitleResponse = { status: 404, data: urlMissingTitle, headers: { 'content-type': 'text/html; charset=utf-8' } }; -const axiosResourceNotFound = { status: 404, data: 'Not Found' }; -const axiosError = new Error(); -axiosError.response = { status: 500, data: 'Error' }; -const mockUpdate = jest.spyOn(Resource, 'update').mockImplementation(() => Promise.resolve()); - -describe('resource worker tests', () => { - let resource; - afterAll(async () => { - await Resource.destroy({ where: { id: resource.id } }); - await db.sequelize.close(); - }); - afterEach(() => { - jest.clearAllMocks(); - }); - - it('non-eclkc clean resource title get', async () => { - // Mock TITLE get. - mockAxios.mockImplementationOnce(() => Promise.resolve(axiosCleanResponse)); - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - // Call the function. - const got = await getResourceMetaDataJob({ data: { resourceId: 100000, resourceUrl: 'https://test.gov/mental-health/article/head-start-heals-campaign' } }); - - // Check the response. - expect(got.status).toBe(200); - - // Check the data. - expect(got.data).toStrictEqual({ url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }); - - // Check the axios call. - expect(mockAxios).toBeCalled(); - - // expect mockUpdate to have only been called once. - expect(mockUpdate).toBeCalledTimes(2); - - // Check title update. - expect(mockUpdate).toHaveBeenNthCalledWith( - 1, - { - // title: 'Head Start | ECLKC', - lastStatusCode: 200, - mimeType: axiosCleanResponse.headers['content-type'], - }, - { - where: { url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }, - individualHooks: true, - }, - ); - - expect(mockUpdate).toHaveBeenLastCalledWith( - { - title: 'Head Start | ECLKC', - lastStatusCode: 200, - mimeType: axiosCleanResponse.headers['content-type'], - metadata: { - language: 'en', - topic: 'Mental Health', - 'resource-type': 'Article', - 'national-centers': 'Health, Behavioral Health, and Safety', - 'node-id': '7858', - 'exclude-from-dynamic-view': 'False', - title: 'Head Start | ECLKC', - }, - metadataUpdatedAt: expect.anything(), - }, - { where: { url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }, individualHooks: true }, - ); - }); - - it('non-eclkc error on resource title get', async () => { - // Mock TITLE get. - const axiosHtmlScrapeError = new Error(); - axiosHtmlScrapeError.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; - mockAxios.mockImplementationOnce(() => Promise.reject(axiosHtmlScrapeError)); - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - // Call the function. - const got = await getResourceMetaDataJob({ data: { resourceId: 100000, resourceUrl: 'https://test.gov/mental-health/article/head-start-heals-campaign' } }); - - // Check the response. - expect(got.status).toBe(500); - }); - - it('tests a clean resource metadata get', async () => { - // Metadata. 
- mockAxios.mockImplementationOnce(() => Promise.resolve({ - status: 200, - headers: { 'content-type': 'application/json' }, - data: metadata, - })); - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); - expect(got.status).toBe(200); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); - - expect(mockUpdate).toBeCalledTimes(2); - - // Check the update call. - expect(mockUpdate).toHaveBeenLastCalledWith( - { - metadata: { - changed: [ - { - value: '2023-05-26T18:57:15+00:00', - }, - ], - created: [ - { - value: '2020-04-21T15:20:23+00:00', - }, - ], - field_context: [ - { - value: '

\"Two', - }, - ], - field_taxonomy_national_centers: [ - { - target_type: 'taxonomy_term', - }, - ], - field_taxonomy_topic: [ - { - target_type: 'taxonomy_term', - }, - ], - langcode: [ - { - value: 'en', - }, - ], - title: [ - { - value: 'Head Start Heals Campaign', - }, - ], - }, - metadataUpdatedAt: expect.anything(), - title: 'Head Start Heals Campaign', - lastStatusCode: 200, - }, - { - individualHooks: true, - where: { - url: 'http://www.eclkc.ohs.acf.hhs.gov', - }, - }, - ); - }); - - it('tests a clean resource metadata get with a url that has params', async () => { - // Metadata. - mockAxios.mockImplementationOnce(() => Promise.resolve({ - status: 200, - headers: { 'content-type': 'application/json' }, - data: metadata, - })); - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1' } }); - expect(got.status).toBe(200); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1' }); - - expect(mockUpdate).toBeCalledTimes(2); - - expect(mockAxios).toBeCalledWith( - 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1&_format=json', - { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', - maxRedirects: 25, - responseEncoding: 'utf8', - }, - ); - - // Check the update call. - expect(mockUpdate).toHaveBeenLastCalledWith( - { - metadata: { - changed: [ - { - value: '2023-05-26T18:57:15+00:00', - }, - ], - created: [ - { - value: '2020-04-21T15:20:23+00:00', - }, - ], - field_context: [ - { - value: '

\"Two', - }, - ], - field_taxonomy_national_centers: [ - { - target_type: 'taxonomy_term', - }, - ], - field_taxonomy_topic: [ - { - target_type: 'taxonomy_term', - }, - ], - langcode: [ - { - value: 'en', - }, - ], - title: [ - { - value: 'Head Start Heals Campaign', - }, - ], - }, - metadataUpdatedAt: expect.anything(), - title: 'Head Start Heals Campaign', - lastStatusCode: 200, - }, - { - individualHooks: true, - where: { - url: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1', - }, - }, - ); - }); - - it('tests a clean resource metadata get with a url that has a pound sign', async () => { - // Metadata. - mockAxios.mockImplementationOnce(() => Promise.resolve({ - status: 200, - headers: { 'content-type': 'application/json' }, - data: metadata, - })); - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov/section#2' } }); - expect(got.status).toBe(200); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov/section#2' }); - - expect(mockUpdate).toBeCalledTimes(2); - - // Expect axios get to have been called with the correct url. - expect(mockAxios).toBeCalledWith( - 'http://www.eclkc.ohs.acf.hhs.gov/section?_format=json', - { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', - maxRedirects: 25, - responseEncoding: 'utf8', - }, - ); - - // Check the update call. - expect(mockUpdate).toHaveBeenLastCalledWith( - { - metadata: { - changed: [ - { - value: '2023-05-26T18:57:15+00:00', - }, - ], - created: [ - { - value: '2020-04-21T15:20:23+00:00', - }, - ], - field_context: [ - { - value: '

\"Two', - }, - ], - field_taxonomy_national_centers: [ - { - target_type: 'taxonomy_term', - }, - ], - field_taxonomy_topic: [ - { - target_type: 'taxonomy_term', - }, - ], - langcode: [ - { - value: 'en', - }, - ], - title: [ - { - value: 'Head Start Heals Campaign', - }, - ], - }, - metadataUpdatedAt: expect.anything(), - title: 'Head Start Heals Campaign', - lastStatusCode: 200, - }, - { - individualHooks: true, - where: { - url: 'http://www.eclkc.ohs.acf.hhs.gov/section#2', - }, - }, - ); - }); - - it('tests error with a response from get metadata', async () => { - const axiosMetadataErrorResponse = new Error(); - axiosMetadataErrorResponse.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; - mockAxios.mockImplementationOnce( - () => Promise.reject(axiosMetadataErrorResponse), - ).mockImplementationOnce(() => Promise.resolve(axiosMetadataErrorResponse)); - - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); - expect(got.status).toBe(500); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); - - expect(mockUpdate).toBeCalledTimes(2); - }); - - it('tests error without a response from get metadata', async () => { - const axiosMetadataErrorResponse = new Error(); - axiosMetadataErrorResponse.response = { data: 'Error' }; - mockAxios.mockImplementationOnce(() => Promise.reject(axiosMetadataErrorResponse)); - - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); - - // Verify auditlogger.error was called with the message we expect. - expect(auditLogger.error).toBeCalledTimes(3); - }); - - it('eclkc resource we get metadata but no title', async () => { - mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); - mockAxios.mockImplementationOnce(() => Promise.resolve({ - status: 200, - headers: { 'content-type': 'application/json' }, - data: { ...metadata, title: null }, - })); - mockAxios.mockImplementationOnce(() => Promise.resolve({ - status: 200, - headers: { 'content-type': 'text/html; charset=utf-8' }, - data: urlMissingTitle, - })); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - // Scrape. - mockAxios.mockImplementationOnce(() => Promise.resolve(axiosCleanResponse)); - mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); - - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); - expect(got.status).toBe(200); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); - - expect(mockUpdate).toBeCalledTimes(2); - - // Check title scrape update.. - expect(mockUpdate).toBeCalledWith( - { - lastStatusCode: 200, - mimeType: 'text/html; charset=utf-8', - }, - { - individualHooks: true, - where: { url: 'http://www.eclkc.ohs.acf.hhs.gov' }, - }, - ); - - // Check the update call. - expect(mockUpdate).toBeCalledWith( - { - metadata: { - changed: [ - { - value: '2023-05-26T18:57:15+00:00', - }, - ], - created: [ - { - value: '2020-04-21T15:20:23+00:00', - }, - ], - field_context: [ - { - value: '

\"Two', - }, - ], - field_taxonomy_national_centers: [ - { - target_type: 'taxonomy_term', - }, - ], - field_taxonomy_topic: [ - { - target_type: 'taxonomy_term', - }, - ], - langcode: [ - { - value: 'en', - }, - ], - title: null, - }, - metadataUpdatedAt: expect.anything(), - title: 'null', - lastStatusCode: 200, - }, - { - individualHooks: true, - where: { - url: 'http://www.eclkc.ohs.acf.hhs.gov', - }, - }, - ); - }); - - it('non-eclkc resource missing title', async () => { - mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); - mockAxios.mockImplementationOnce(() => Promise.resolve(axiosNoTitleResponse)); - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); - expect(got.status).toBe(404); - expect(got.data).toStrictEqual({ url: 'http://www.test.gov' }); - expect(mockAxiosHead).toBeCalled(); - expect(mockAxios).not.toBeCalled(); - expect(mockUpdate).toBeCalled(); - }); - - it('non-eclkc resource url not found', async () => { - mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); - mockAxios.mockImplementationOnce(() => Promise.resolve(axiosResourceNotFound)); - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); - expect(got.status).toBe(404); - expect(got.data).toStrictEqual({ url: 'http://www.test.gov' }); - expect(mockAxiosHead).toBeCalled(); - expect(mockAxios).not.toBeCalled(); - expect(mockUpdate).toBeCalled(); - }); - - it('eclkc resource url not found', async () => { - mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); - mockAxios.mockImplementationOnce(() => Promise.resolve(axiosResourceNotFound)); - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); - expect(got.status).toBe(404); - expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); - expect(mockAxiosHead).toBeCalled(); - expect(mockAxios).not.toBeCalled(); - expect(mockUpdate).toBeCalled(); - }); - - it('get mime type handles error response correctly', async () => { - // Mock auditLogger.error. - const mockAuditLogger = jest.spyOn(auditLogger, 'error'); - // Mock error on axios head error. - const axiosMimeError = new Error(); - axiosMimeError.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; - mockAxiosHead.mockImplementationOnce(() => Promise.reject(axiosMimeError)); - - // Call the function. - const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); - // Check the response. - expect(got.status).toBe(500); - - // Expect auditLogger.error to have been called with the correct message. - expect(mockAuditLogger).toBeCalledTimes(2); - - // Check the axios call. - expect(mockAxiosHead).toBeCalled(); - - // Check the update call. - expect(mockUpdate).toBeCalledTimes(1); - - // Check the update call. - expect(mockUpdate).toBeCalledWith( - { - lastStatusCode: 500, - mimeType: 'text/html; charset=utf-8', - }, - { - individualHooks: true, - where: { - url: 'http://www.test.gov', - }, - }, - ); - }); - - it('get mime type handles no error response correctly', async () => { - // Mock error on axios head error. 
-    const axiosMimeError = new Error();
-    axiosMimeError.response = { data: 'Error' };
-    mockAxiosHead.mockImplementationOnce(() => Promise.reject(axiosMimeError));
-
-    // Call the function.
-    const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } });
-
-    // Check the response.
-    expect(got).toEqual({
-      status: 500,
-      data: { url: 'http://www.test.gov' },
-    });
-  });
-});
+/* eslint-disable no-useless-escape */
+import axios from 'axios';
+import { expect } from '@playwright/test';
+import { auditLogger } from '../logger';
+import { getResourceMetaDataJob, overrideStatusCodeOnAuthRequired } from './resource';
+import db, { Resource } from '../models';
+
+jest.mock('../logger');
+jest.mock('bull');
+
+const urlReturn = `
+<html lang="en">
+<head>
+<meta name="language" content="en">
+<meta name="topic" content="Mental Health">
+<meta name="resource-type" content="Article">
+<meta name="national-centers" content="Health, Behavioral Health, and Safety">
+<meta name="node-id" content="7858">
+<meta name="exclude-from-dynamic-view" content="False">
+<title>Head Start | ECLKC</title>
+</head>
+<body>
+test
+</body>
+</html>
+`;
+
+const urlMissingTitle = `
+<html lang="en">
+<head>
+</head>
+<body>
+test
+</body>
+</html>
+`;
+
+const metadata = {
+  created: [{ value: '2020-04-21T15:20:23+00:00' }],
+  changed: [{ value: '2023-05-26T18:57:15+00:00' }],
+  title: [{ value: 'Head Start Heals Campaign' }],
+  field_taxonomy_national_centers: [{ target_type: 'taxonomy_term' }],
+  field_taxonomy_topic: [{ target_type: 'taxonomy_term' }],
+  langcode: [{ value: 'en' }],
+  field_context: [{ value: '

\"Two' }], +}; + +const mockAxios = jest.spyOn(axios, 'get').mockImplementation(() => Promise.resolve()); +const mockAxiosHead = jest.spyOn(axios, 'head').mockImplementation(() => Promise.resolve()); +const axiosCleanMimeResponse = { status: 200, headers: { 'content-type': 'text/html; charset=utf-8' } }; +const axiosCleanResponse = { ...axiosCleanMimeResponse, data: urlReturn }; +const axiosNoTitleResponse = { status: 404, data: urlMissingTitle, headers: { 'content-type': 'text/html; charset=utf-8' } }; +const axiosResourceNotFound = { status: 404, data: 'Not Found' }; +const axiosError = new Error(); +axiosError.response = { status: 500, data: 'Error' }; +const mockUpdate = jest.spyOn(Resource, 'update').mockImplementation(() => Promise.resolve()); + +describe('resource worker tests', () => { + let resource; + afterAll(async () => { + await Resource.destroy({ where: { id: resource.id } }); + await db.sequelize.close(); + }); + afterEach(() => { + jest.clearAllMocks(); + }); + + it('non-eclkc clean resource title get', async () => { + // Mock TITLE get. + mockAxios.mockImplementationOnce(() => Promise.resolve(axiosCleanResponse)); + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + // Call the function. + const got = await getResourceMetaDataJob({ data: { resourceId: 100000, resourceUrl: 'https://test.gov/mental-health/article/head-start-heals-campaign' } }); + + // Check the response. + expect(got.status).toBe(200); + + // Check the data. + expect(got.data).toStrictEqual({ url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }); + + // Check the axios call. + expect(mockAxios).toBeCalled(); + + // expect mockUpdate to have only been called once. + expect(mockUpdate).toBeCalledTimes(2); + + // Check title update. + expect(mockUpdate).toHaveBeenNthCalledWith( + 1, + { + // title: 'Head Start | ECLKC', + lastStatusCode: 200, + mimeType: axiosCleanResponse.headers['content-type'], + }, + { + where: { url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }, + individualHooks: true, + }, + ); + + expect(mockUpdate).toHaveBeenLastCalledWith( + { + title: 'Head Start | ECLKC', + lastStatusCode: 200, + mimeType: axiosCleanResponse.headers['content-type'], + metadata: { + language: 'en', + topic: 'Mental Health', + 'resource-type': 'Article', + 'national-centers': 'Health, Behavioral Health, and Safety', + 'node-id': '7858', + 'exclude-from-dynamic-view': 'False', + title: 'Head Start | ECLKC', + }, + metadataUpdatedAt: expect.anything(), + }, + { where: { url: 'https://test.gov/mental-health/article/head-start-heals-campaign' }, individualHooks: true }, + ); + }); + + it('non-eclkc error on resource title get', async () => { + // Mock TITLE get. + const axiosHtmlScrapeError = new Error(); + axiosHtmlScrapeError.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; + mockAxios.mockImplementationOnce(() => Promise.reject(axiosHtmlScrapeError)); + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + // Call the function. + const got = await getResourceMetaDataJob({ data: { resourceId: 100000, resourceUrl: 'https://test.gov/mental-health/article/head-start-heals-campaign' } }); + + // Check the response. + expect(got.status).toBe(500); + }); + + it('tests a clean resource metadata get', async () => { + // Metadata. 
+ mockAxios.mockImplementationOnce(() => Promise.resolve({ + status: 200, + headers: { 'content-type': 'application/json' }, + data: metadata, + })); + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); + expect(got.status).toBe(200); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); + + expect(mockUpdate).toBeCalledTimes(2); + + // Check the update call. + expect(mockUpdate).toHaveBeenLastCalledWith( + { + metadata: { + changed: [ + { + value: '2023-05-26T18:57:15+00:00', + }, + ], + created: [ + { + value: '2020-04-21T15:20:23+00:00', + }, + ], + field_context: [ + { + value: '

\"Two', + }, + ], + field_taxonomy_national_centers: [ + { + target_type: 'taxonomy_term', + }, + ], + field_taxonomy_topic: [ + { + target_type: 'taxonomy_term', + }, + ], + langcode: [ + { + value: 'en', + }, + ], + title: [ + { + value: 'Head Start Heals Campaign', + }, + ], + }, + metadataUpdatedAt: expect.anything(), + title: 'Head Start Heals Campaign', + lastStatusCode: 200, + }, + { + individualHooks: true, + where: { + url: 'http://www.eclkc.ohs.acf.hhs.gov', + }, + }, + ); + }); + + it('tests a clean resource metadata get with a url that has params', async () => { + // Metadata. + mockAxios.mockImplementationOnce(() => Promise.resolve({ + status: 200, + headers: { 'content-type': 'application/json' }, + data: metadata, + })); + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1' } }); + expect(got.status).toBe(200); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1' }); + + expect(mockUpdate).toBeCalledTimes(2); + + expect(mockAxios).toBeCalledWith( + 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1&_format=json', + { + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', + maxRedirects: 25, + responseEncoding: 'utf8', + }, + ); + + // Check the update call. + expect(mockUpdate).toHaveBeenLastCalledWith( + { + metadata: { + changed: [ + { + value: '2023-05-26T18:57:15+00:00', + }, + ], + created: [ + { + value: '2020-04-21T15:20:23+00:00', + }, + ], + field_context: [ + { + value: '

\"Two', + }, + ], + field_taxonomy_national_centers: [ + { + target_type: 'taxonomy_term', + }, + ], + field_taxonomy_topic: [ + { + target_type: 'taxonomy_term', + }, + ], + langcode: [ + { + value: 'en', + }, + ], + title: [ + { + value: 'Head Start Heals Campaign', + }, + ], + }, + metadataUpdatedAt: expect.anything(), + title: 'Head Start Heals Campaign', + lastStatusCode: 200, + }, + { + individualHooks: true, + where: { + url: 'http://www.eclkc.ohs.acf.hhs.gov/activity-reports?region.in[]=1', + }, + }, + ); + }); + + it('tests a clean resource metadata get with a url that has a pound sign', async () => { + // Metadata. + mockAxios.mockImplementationOnce(() => Promise.resolve({ + status: 200, + headers: { 'content-type': 'application/json' }, + data: metadata, + })); + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov/section#2' } }); + expect(got.status).toBe(200); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov/section#2' }); + + expect(mockUpdate).toBeCalledTimes(2); + + // Expect axios get to have been called with the correct url. + expect(mockAxios).toBeCalledWith( + 'http://www.eclkc.ohs.acf.hhs.gov/section?_format=json', + { + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36', + maxRedirects: 25, + responseEncoding: 'utf8', + }, + ); + + // Check the update call. + expect(mockUpdate).toHaveBeenLastCalledWith( + { + metadata: { + changed: [ + { + value: '2023-05-26T18:57:15+00:00', + }, + ], + created: [ + { + value: '2020-04-21T15:20:23+00:00', + }, + ], + field_context: [ + { + value: '

\"Two', + }, + ], + field_taxonomy_national_centers: [ + { + target_type: 'taxonomy_term', + }, + ], + field_taxonomy_topic: [ + { + target_type: 'taxonomy_term', + }, + ], + langcode: [ + { + value: 'en', + }, + ], + title: [ + { + value: 'Head Start Heals Campaign', + }, + ], + }, + metadataUpdatedAt: expect.anything(), + title: 'Head Start Heals Campaign', + lastStatusCode: 200, + }, + { + individualHooks: true, + where: { + url: 'http://www.eclkc.ohs.acf.hhs.gov/section#2', + }, + }, + ); + }); + + it('tests error with a response from get metadata', async () => { + const axiosMetadataErrorResponse = new Error(); + axiosMetadataErrorResponse.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; + mockAxios.mockImplementationOnce( + () => Promise.reject(axiosMetadataErrorResponse), + ).mockImplementationOnce(() => Promise.resolve(axiosMetadataErrorResponse)); + + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); + expect(got.status).toBe(500); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); + + expect(mockUpdate).toBeCalledTimes(2); + }); + + it('tests error without a response from get metadata', async () => { + const axiosMetadataErrorResponse = new Error(); + axiosMetadataErrorResponse.response = { data: 'Error' }; + mockAxios.mockImplementationOnce(() => Promise.reject(axiosMetadataErrorResponse)); + + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); + + // Verify auditlogger.error was called with the message we expect. + expect(auditLogger.error).toBeCalledTimes(3); + }); + + it('eclkc resource we get metadata but no title', async () => { + mockAxiosHead.mockImplementationOnce(() => Promise.resolve(axiosCleanMimeResponse)); + mockAxios.mockImplementationOnce(() => Promise.resolve({ + status: 200, + headers: { 'content-type': 'application/json' }, + data: { ...metadata, title: null }, + })); + mockAxios.mockImplementationOnce(() => Promise.resolve({ + status: 200, + headers: { 'content-type': 'text/html; charset=utf-8' }, + data: urlMissingTitle, + })); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + // Scrape. + mockAxios.mockImplementationOnce(() => Promise.resolve(axiosCleanResponse)); + mockUpdate.mockImplementationOnce(() => Promise.resolve([1])); + + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); + expect(got.status).toBe(200); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); + + expect(mockUpdate).toBeCalledTimes(2); + + // Check title scrape update.. + expect(mockUpdate).toBeCalledWith( + { + lastStatusCode: 200, + mimeType: 'text/html; charset=utf-8', + }, + { + individualHooks: true, + where: { url: 'http://www.eclkc.ohs.acf.hhs.gov' }, + }, + ); + + // Check the update call. + expect(mockUpdate).toBeCalledWith( + { + metadata: { + changed: [ + { + value: '2023-05-26T18:57:15+00:00', + }, + ], + created: [ + { + value: '2020-04-21T15:20:23+00:00', + }, + ], + field_context: [ + { + value: '

\"Two', + }, + ], + field_taxonomy_national_centers: [ + { + target_type: 'taxonomy_term', + }, + ], + field_taxonomy_topic: [ + { + target_type: 'taxonomy_term', + }, + ], + langcode: [ + { + value: 'en', + }, + ], + title: null, + }, + metadataUpdatedAt: expect.anything(), + title: 'null', + lastStatusCode: 200, + }, + { + individualHooks: true, + where: { + url: 'http://www.eclkc.ohs.acf.hhs.gov', + }, + }, + ); + }); + + it('non-eclkc resource missing title', async () => { + mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); + mockAxios.mockImplementationOnce(() => Promise.resolve(axiosNoTitleResponse)); + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); + expect(got.status).toBe(404); + expect(got.data).toStrictEqual({ url: 'http://www.test.gov' }); + expect(mockAxiosHead).toBeCalled(); + expect(mockAxios).not.toBeCalled(); + expect(mockUpdate).toBeCalled(); + }); + + it('non-eclkc resource url not found', async () => { + mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); + mockAxios.mockImplementationOnce(() => Promise.resolve(axiosResourceNotFound)); + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); + expect(got.status).toBe(404); + expect(got.data).toStrictEqual({ url: 'http://www.test.gov' }); + expect(mockAxiosHead).toBeCalled(); + expect(mockAxios).not.toBeCalled(); + expect(mockUpdate).toBeCalled(); + }); + + it('eclkc resource url not found', async () => { + mockAxiosHead.mockImplementationOnce(() => Promise.resolve({ headers: { 'content-type': 'text/html; charset=utf-8' }, status: 404 })); + mockAxios.mockImplementationOnce(() => Promise.resolve(axiosResourceNotFound)); + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.eclkc.ohs.acf.hhs.gov' } }); + expect(got.status).toBe(404); + expect(got.data).toStrictEqual({ url: 'http://www.eclkc.ohs.acf.hhs.gov' }); + expect(mockAxiosHead).toBeCalled(); + expect(mockAxios).not.toBeCalled(); + expect(mockUpdate).toBeCalled(); + }); + + it('get mime type handles error response correctly', async () => { + // Mock auditLogger.error. + const mockAuditLogger = jest.spyOn(auditLogger, 'error'); + // Mock error on axios head error. + const axiosMimeError = new Error(); + axiosMimeError.response = { status: 500, data: 'Error', headers: { 'content-type': 'text/html; charset=utf-8' } }; + mockAxiosHead.mockImplementationOnce(() => Promise.reject(axiosMimeError)); + + // Call the function. + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); + // Check the response. + expect(got.status).toBe(500); + + // Expect auditLogger.error to have been called with the correct message. + expect(mockAuditLogger).toBeCalledTimes(2); + + // Check the axios call. + expect(mockAxiosHead).toBeCalled(); + + // Check the update call. + expect(mockUpdate).toBeCalledTimes(1); + + // Check the update call. + expect(mockUpdate).toBeCalledWith( + { + lastStatusCode: 500, + mimeType: 'text/html; charset=utf-8', + }, + { + individualHooks: true, + where: { + url: 'http://www.test.gov', + }, + }, + ); + }); + + it('get mime type handles no error response correctly', async () => { + // Mock error on axios head error. 
+ const axiosMimeError = new Error(); + axiosMimeError.response = { data: 'Error' }; + mockAxiosHead.mockImplementationOnce(() => Promise.reject(axiosMimeError)); + + // Call the function. + const got = await getResourceMetaDataJob({ data: { resourceUrl: 'http://www.test.gov' } }); + + // Check the response. + expect(got).toEqual({ + status: 500, + data: { url: 'http://www.test.gov' }, + }); + }); +}); + +describe('overrideStatusCodeOnAuthRequired', () => { + const httpCodes = { OK: 200, UNAUTHORIZED: 401, SERVICE_UNAVAILABLE: 503 }; + + it('returns UNAUTHORIZED if status code is OK and authentication is required', () => { + const statusCode = httpCodes.OK; + const list = ['auth']; + const data = 'some data with auth requirement'; + const result = overrideStatusCodeOnAuthRequired(statusCode, list, data); + expect(result).toBe(httpCodes.UNAUTHORIZED); + }); + + it('returns OK if status code is OK and authentication is not required', () => { + const statusCode = httpCodes.OK; + const list = ['no-auth']; + const data = 'data without auth requirement'; + const result = overrideStatusCodeOnAuthRequired(statusCode, list, data); + expect(result).toBe(httpCodes.OK); + }); +}); diff --git a/src/lib/semaphore.test.js b/src/lib/semaphore.test.js index a0eb580632..d595bdf154 100644 --- a/src/lib/semaphore.test.js +++ b/src/lib/semaphore.test.js @@ -53,4 +53,60 @@ describe('Semaphore', () => { semaphore.release(); expect(semaphore.data[''].currentConcurrency).toBe(0); }); + + it('should not crash when releasing without any operation in progress', () => { + const semaphore = new Semaphore(); + expect(() => semaphore.release()).not.toThrow(); + }); + + it('should correctly decrement currentConcurrency when no waiting promises', async () => { + const semaphore = new Semaphore(2); + + // Acquire two locks + await semaphore.acquire(); + await semaphore.acquire(); + expect(semaphore.data[''].currentConcurrency).toBe(2); + + // Release one lock + semaphore.release(); + expect(semaphore.data[''].currentConcurrency).toBe(1); + }); + + it('should resolve the oldest waiting promise when released', async () => { + const semaphore = new Semaphore(1); + + // Acquire the first lock + await semaphore.acquire(); + let wasFirstPromiseResolved = false; + let wasSecondPromiseResolved = false; + + // Attempt to acquire two more locks + semaphore.acquire().then(() => { + wasFirstPromiseResolved = true; + }); + semaphore.acquire().then(() => { + wasSecondPromiseResolved = true; + }); + + // Initially, none should be resolved + expect(wasFirstPromiseResolved).toBe(false); + expect(wasSecondPromiseResolved).toBe(false); + + // Release the lock, expecting the first waiting promise to resolve + semaphore.release(); + await new Promise( + // eslint-disable-next-line no-promise-executor-return + (resolve) => setTimeout(resolve, 0), + ); // Wait for promises to potentially resolve + expect(wasFirstPromiseResolved).toBe(true); + expect(wasSecondPromiseResolved).toBe(false); + + // Release the lock again, expecting the second waiting promise to resolve + semaphore.release(); + await new Promise( + // eslint-disable-next-line no-promise-executor-return + (resolve) => setTimeout(resolve, 0), + ); // Wait for promises to potentially resolve + expect(wasSecondPromiseResolved).toBe(true); + }); }); diff --git a/src/lib/stream/tests/s3.test.js b/src/lib/stream/tests/s3.test.js index e9f58ea41c..53e17d44b5 100644 --- a/src/lib/stream/tests/s3.test.js +++ b/src/lib/stream/tests/s3.test.js @@ -2,6 +2,7 @@ import AWS from 'aws-sdk'; import { 
Readable } from 'stream'; import { auditLogger } from '../../../logger'; import S3Client from '../s3'; +import { generateS3Config } from '../../s3'; jest.mock('aws-sdk'); @@ -37,6 +38,30 @@ describe('S3Client', () => { jest.clearAllMocks(); }); + describe('constructor', () => { + it('should create an S3 client with default configuration', () => { + const s3Config = generateS3Config(); + const client = new S3Client(); + expect(AWS.S3).toHaveBeenCalledWith(s3Config.s3Config); + }); + + it('should create an S3 client with custom configuration', () => { + const customConfig = { + bucketName: 'custom-bucket', + s3Config: { + accessKeyId: 'customAccessKeyId', + endpoint: 'customEndpoint', + region: 'customRegion', + secretAccessKey: 'customSecretAccessKey', + signatureVersion: 'v4', + s3ForcePathStyle: true, + }, + }; + const client = new S3Client(customConfig); + expect(AWS.S3).toHaveBeenCalledWith(customConfig.s3Config); + }); + }); + describe('uploadFileAsStream', () => { it('should upload file as stream', async () => { const key = 'test-key'; diff --git a/src/lib/updateGrantsRecipients.js b/src/lib/updateGrantsRecipients.js index a83720b67d..23f8adc381 100644 --- a/src/lib/updateGrantsRecipients.js +++ b/src/lib/updateGrantsRecipients.js @@ -61,7 +61,7 @@ function combineNames(firstName, lastName) { return joinedName === '' ? null : joinedName; } -function getPersonnelField(role, field, program) { +export function getPersonnelField(role, field, program) { // return if program is not an object. if (typeof program !== 'object') { return null; diff --git a/src/lib/updateGrantsRecipients.test.js b/src/lib/updateGrantsRecipients.test.js index bb0a084214..19ee6efff6 100644 --- a/src/lib/updateGrantsRecipients.test.js +++ b/src/lib/updateGrantsRecipients.test.js @@ -2,7 +2,7 @@ import { Op, QueryTypes } from 'sequelize'; import axios from 'axios'; import fs from 'mz/fs'; -import updateGrantsRecipients, { processFiles, updateCDIGrantsWithOldGrantData } from './updateGrantsRecipients'; +import updateGrantsRecipients, { getPersonnelField, processFiles, updateCDIGrantsWithOldGrantData } from './updateGrantsRecipients'; import db, { sequelize, Recipient, Goal, Grant, Program, ZALGrant, ActivityRecipient, ProgramPersonnel, } from '../models'; @@ -1003,3 +1003,10 @@ describe('Update grants, program personnel, and recipients', () => { }); }); }); + +describe('getPersonnelField', () => { + it('returns null when data is not an object', () => { + const out = getPersonnelField('role', 'field', 'program'); + expect(out).toBeNull(); + }); +}); diff --git a/src/migrations/20240801000000-merge_duplicate_args.js b/src/migrations/20240801000000-merge_duplicate_args.js new file mode 100644 index 0000000000..01f0abda85 --- /dev/null +++ b/src/migrations/20240801000000-merge_duplicate_args.js @@ -0,0 +1,41 @@ +const { + prepMigration, +} = require('../lib/migration'); + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface) { + await queryInterface.sequelize.transaction(async (transaction) => { + const sessionSig = __filename; + await prepMigration(queryInterface, transaction, sessionSig); + await queryInterface.sequelize.query(` + + -- Call the preexisting function for deduping args + -- created in 20240520000000-merge_duplicate_args.js + SELECT dedupe_args(); + + -- The expected results look like: + -- op_order | op_name | record_cnt + ------------+-----------------+------------ + -- 1 | relinked_argfrs | 0 + -- 2 | deleted_argfrs | 0 + -- 3 | relinked_argrs | 0 + -- 4 | 
deleted_argrs | 0
+          -- 5        | deleted_args    | 66
+          SELECT
+            1 op_order,
+            'relinked_argfrs' op_name,
+            COUNT(*) record_cnt
+          FROM relinked_argfrs
+          UNION SELECT 2, 'deleted_argfrs', COUNT(*) FROM deleted_argfrs
+          UNION SELECT 3, 'relinked_argrs', COUNT(*) FROM relinked_argrs
+          UNION SELECT 4, 'deleted_argrs', COUNT(*) FROM deleted_argrs
+          UNION SELECT 5, 'deleted_args', COUNT(*) FROM deleted_args
+          ORDER BY 1;
+      `, { transaction });
+    });
+  },
+  async down() {
+    // rolling back merges and deletes would be a mess
+  },
+};
diff --git a/src/migrations/20240802204120-repair-multiple-aros.js b/src/migrations/20240802204120-repair-multiple-aros.js
new file mode 100644
index 0000000000..cfd142a777
--- /dev/null
+++ b/src/migrations/20240802204120-repair-multiple-aros.js
@@ -0,0 +1,27 @@
+const {
+  prepMigration,
+} = require('../lib/migration');
+
+module.exports = {
+  up: async (queryInterface) => queryInterface.sequelize.transaction(
+    async (transaction) => {
+      await prepMigration(queryInterface, transaction, __filename);
+      // Somehow this ARO was duplicated three times. This migration repairs it.
+      // To verify, view the report (45555) as its owner and confirm that the goals and
+      // objectives page shows 1 goal with TTA provided and 2 files
+      await queryInterface.sequelize.query(/* sql */`
+        DELETE FROM "ActivityReportObjectiveFiles" WHERE "activityReportObjectiveId" IN (232020, 232022);
+        DELETE FROM "ActivityReportObjectives" WHERE id IN (232020, 232022)
+
+      `, { transaction });
+    },
+  ),
+
+  down: async (queryInterface) => queryInterface.sequelize.transaction(
+    async (transaction) => {
+      await prepMigration(queryInterface, transaction, __filename);
+      // If we end up needing to revert this, it would be easier to use a separate
+      // migration using the txid (or a similar identifier) after it's already set
+    },
+  ),
+};
diff --git a/src/policies/event.js b/src/policies/event.js
index 9bb7138f36..c4e11b589b 100644
--- a/src/policies/event.js
+++ b/src/policies/event.js
@@ -26,6 +26,11 @@ export default class EventReport {
     ].includes(p.scopeId) && p.regionId === this.eventReport.regionId);
   }
 
+  hasPocInRegion() {
+    // eslint-disable-next-line max-len
+    return !!this.permissions.find((p) => p.scopeId === SCOPES.POC_TRAINING_REPORTS && p.regionId === this.eventReport.regionId);
+  }
+
   /**
    * Determines if the user has write access to the specified region
    * or to the region of their current event report.
diff --git a/src/services/activityReports.js b/src/services/activityReports.js
index 3b5f7fb50c..001f17ef5a 100644
--- a/src/services/activityReports.js
+++ b/src/services/activityReports.js
@@ -14,7 +14,6 @@ import {
   ActivityReportCollaborator,
   ActivityReportFile,
   sequelize,
-  Sequelize,
   ActivityRecipient,
   File,
   Grant,
@@ -1043,6 +1042,7 @@ export async function setStatus(report, status) {
 * @returns {*} Grants and Other entities
 */
 export async function possibleRecipients(regionId, activityReportId = null) {
+  const inactiveDayDuration = 61;
   const grants = await Recipient.findAll({
     attributes: [
       'id',
@@ -1080,7 +1080,18 @@
       '$grants.regionId$': regionId,
       [Op.or]: [
         { '$grants.status$': 'Active' },
-        { '$grants->activityRecipients.activityReportId$': activityReportId },
+        { ...(activityReportId ? { '$grants.activityRecipients.activityReportId$': activityReportId } : {}) },
+        {
+          '$grants.inactivationDate$': {
+            [Op.gte]: sequelize.literal(`
+              CASE
+                WHEN ${activityReportId ? 'true' : 'false'}
+                  THEN (SELECT COALESCE("startDate", NOW() - INTERVAL '${inactiveDayDuration} days') FROM "ActivityReports" WHERE "id" = ${activityReportId})
+                ELSE date_trunc('day', NOW()) - interval '${inactiveDayDuration} days'
+              END
+            `),
+          },
+        },
       ],
     },
   });
diff --git a/src/services/activityReports.test.js b/src/services/activityReports.test.js
index 93e2c88253..5cab8335a2 100644
--- a/src/services/activityReports.test.js
+++ b/src/services/activityReports.test.js
@@ -1,3 +1,4 @@
+import faker from '@faker-js/faker';
 import { APPROVER_STATUSES, REPORT_STATUSES } from '@ttahub/common';
 import db, {
   ActivityReport,
@@ -45,6 +46,14 @@ const ALERT_RECIPIENT_ID = 345;
 const RECIPIENT_WITH_PROGRAMS_ID = 425;
 const DOWNLOAD_RECIPIENT_WITH_PROGRAMS_ID = 426;
 
+const INACTIVE_GRANT_ID_ONE = faker.datatype.number({ min: 9999 });
+const INACTIVE_GRANT_ID_TWO = faker.datatype.number({ min: 9999 });
+const INACTIVE_GRANT_ID_THREE = faker.datatype.number({ min: 9999 });
+
+let inactiveActivityReportOne;
+let inactiveActivityReportTwo;
+let inactiveActivityReportMissingStartDate;
+
 const mockUser = {
   id: 1115665161,
   homeRegionId: 1,
@@ -388,6 +397,80 @@ describe('Activity report service', () => {
       startDate: new Date(),
       endDate: new Date(),
     });
+
+    // Create an inactive grant with an 'inactivationDate' less than 60 days ago.
+    await Grant.create({
+      id: INACTIVE_GRANT_ID_ONE,
+      number: faker.datatype.number({ min: 9999 }),
+      recipientId: RECIPIENT_ID,
+      regionId: 19,
+      status: 'Inactive',
+      startDate: new Date(),
+      endDate: new Date(),
+      inactivationDate: new Date(new Date().setDate(new Date().getDate() - 60)),
+    });
+
+    // Create an inactive grant with an 'inactivationDate' more than 60 days ago.
+    await Grant.create({
+      id: INACTIVE_GRANT_ID_TWO,
+      number: faker.datatype.number({ min: 9999 }),
+      recipientId: RECIPIENT_ID,
+      regionId: 19,
+      status: 'Inactive',
+      startDate: new Date(),
+      endDate: new Date(),
+      inactivationDate: new Date(new Date().setDate(new Date().getDate() - 62)),
+    });
+
+    await Grant.create({
+      id: INACTIVE_GRANT_ID_THREE,
+      number: faker.datatype.number({ min: 9999 }),
+      recipientId: RECIPIENT_ID,
+      regionId: 19,
+      status: 'Inactive',
+      startDate: new Date(),
+      endDate: new Date(),
+      inactivationDate: new Date(),
+    });
+
+    // Create an ActivityReport whose start date reaches back far enough to pick up the inactive grant.
+    inactiveActivityReportOne = await ActivityReport.create({
+      ...submittedReport,
+      userId: mockUser.id,
+      lastUpdatedById: mockUser.id,
+      submissionStatus: REPORT_STATUSES.DRAFT,
+      calculatedStatus: REPORT_STATUSES.DRAFT,
+      activityRecipients: [],
+      // Set a start date that will return the inactive grant.
+      startDate: new Date(new Date().setDate(new Date().getDate() - 62)),
+      endDate: new Date(new Date().setDate(new Date().getDate() - 62)),
+    });
+
+    // Create an ActivityReport whose start date will NOT pick up the inactive grant.
+    inactiveActivityReportTwo = await ActivityReport.create({
+      ...submittedReport,
+      userId: mockUser.id,
+      lastUpdatedById: mockUser.id,
+      submissionStatus: REPORT_STATUSES.DRAFT,
+      calculatedStatus: REPORT_STATUSES.DRAFT,
+      activityRecipients: [],
+      // Set a start date that will NOT return the inactive grant.
+      startDate: new Date(new Date().setDate(new Date().getDate() + 62)),
+      endDate: new Date(new Date().setDate(new Date().getDate() + 62)),
+    });
+
+    // Create an ActivityReport without a start date.
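+    // (With startDate null, the COALESCE in possibleRecipients falls back to
+    // NOW() - 61 days, so grants inactivated today still fall inside the window.)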
+ inactiveActivityReportMissingStartDate = await ActivityReport.create({ + ...submittedReport, + userId: mockUser.id, + lastUpdatedById: mockUser.id, + submissionStatus: REPORT_STATUSES.DRAFT, + calculatedStatus: REPORT_STATUSES.DRAFT, + activityRecipients: [], + // If there is no start date, today's date is used. + startDate: null, + endDate: null, + }); }); afterAll(async () => { @@ -1068,8 +1151,19 @@ describe('Activity report service', () => { it('retrieves correct recipients in region', async () => { const region = 19; const recipients = await possibleRecipients(region); - expect(recipients.grants.length).toBe(1); + + // Get the grant with the id RECIPIENT_ID. + const alertRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === RECIPIENT_ID, + ); + expect(alertRecipient.length).toBe(1); + + // Get the grant with the id INACTIVE_GRANT_ID_ONE. + const inactiveRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === INACTIVE_GRANT_ID_ONE, + ); + expect(inactiveRecipient.length).toBe(1); }); it('retrieves no recipients in empty region', async () => { @@ -1078,6 +1172,63 @@ expect(recipients.grants.length).toBe(0); }); + + it('retrieves inactive grant inside of range with report', async () => { + const region = 19; + const recipients = await possibleRecipients(region, inactiveActivityReportOne.id); + expect(recipients.grants.length).toBe(1); + + // Get the grant with the id RECIPIENT_ID. + const alertRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === RECIPIENT_ID, + ); + expect(alertRecipient.length).toBe(1); + + // Get the grant with the id INACTIVE_GRANT_ID_ONE. + const inactiveRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === INACTIVE_GRANT_ID_ONE, + ); + expect(inactiveRecipient.length).toBe(1); + }); + + it('doesn\'t retrieve inactive grant outside of range with report', async () => { + const region = 19; + const recipients = await possibleRecipients(region, inactiveActivityReportTwo.id); + expect(recipients.grants.length).toBe(1); + expect(recipients.grants[0].grants.length).toBe(1); + + // Get the grant with the id RECIPIENT_ID. + const alertRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === RECIPIENT_ID, + ); + expect(alertRecipient.length).toBe(1); + }); + + it('retrieves inactive grant inside of range with report missing start date', async () => { + const region = 19; + // eslint-disable-next-line max-len + const recipients = await possibleRecipients(region, inactiveActivityReportMissingStartDate.id); + expect(recipients.grants.length).toBe(1); + expect(recipients.grants[0].grants.length).toBe(3); + + // Get the grant with the id RECIPIENT_ID. + const alertRecipient = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === RECIPIENT_ID, + ); + expect(alertRecipient.length).toBe(1); + + // Get the grant with the id INACTIVE_GRANT_ID_ONE. + const inactiveGrantOne = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === INACTIVE_GRANT_ID_ONE, + ); + expect(inactiveGrantOne.length).toBe(1); + + // Get the grant with the id INACTIVE_GRANT_ID_THREE (today's date). 
+ const inactiveGrantThree = recipients.grants[0].grants.filter( + (grant) => grant.dataValues.activityRecipientId === INACTIVE_GRANT_ID_THREE, + ); + expect(inactiveGrantThree.length).toBe(1); + }); }); describe('getAllDownloadableActivityReports', () => { 
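The training report import tests that follow exercise three roles against the EventReport policy in src/policies/event.js: the creator and every National Center collaborator must pass canWriteInRegion(), and every named POC must pass the new hasPocInRegion() check. A short usage sketch of those calls (the user object and regionId here are illustrative):

// Illustrative policy checks, mirroring how csvImport uses EventReport.
// Assumes: import EventReport from '../policies/event';
const policy = new EventReport(user, { regionId: 1 });

if (!policy.canWriteInRegion()) {
  // creator or collaborator lacks READ_WRITE_TRAINING_REPORTS in region 1
}

if (!policy.hasPocInRegion()) {
  // POC lacks POC_TRAINING_REPORTS in region 1
}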
diff --git a/src/services/event.test.js b/src/services/event.test.js index f97edf77c1..1dd8bf0014 100644 --- a/src/services/event.test.js +++ b/src/services/event.test.js @@ -445,31 +445,48 @@ describe('event service', () => { }); describe('tr import', () => { - let user; let data; let buffer; let created; - const eventIdsToDestroy = []; const userId = faker.datatype.number(); + const pocId = faker.datatype.number(); + let poc; + const collaboratorId = faker.datatype.number(); + let collaborator; - const eventId = 'R01-TR-02-3333'; + let ncOne; + let ncTwo; + + const eventId = 'R01-TR-3333'; const regionId = 1; - const editTitle = 'Hogwarts Academy'; - const istName = 'Harry Potter'; + const eventTitle = 'Hogwarts Academy'; const email = 'smartsheetevents@ss.com'; const audience = 'Recipients'; const vision = 'To learn'; - const duration = 'Series'; + const trainingType = 'Series'; const targetPopulation = `"Program Staff Affected by Disaster"`; const reasons = `"Complaint Planning/Coordination"`; - const organizer = 'Dumbledore'; - - const headings = ['Event ID', 'Edit Title', 'IST Name:', 'Creator', 'Event Organizer - Type of Event', 'Event Duration/# NC Days of Support', 'Reason for Activity', 'Target Population(s)', 'Audience', 'Overall Vision/Goal for the PD Event']; + const typeOfEvent = 'IST TTA/Visit'; + + const headings = [ + 'IST/Creator', + 'Event ID', + 'Event Title', + 'Event Organizer - Type of Event', + 'National Centers', + 'Event Duration', + 'Reason(s) for PD', + 'Vision/Goal/Outcomes for the PD Event', + 'Target Population(s)', + 'Audience', + 'Designated Region POC for Event/Request', + ]; beforeAll(async () => { + // owner await db.User.create({ id: userId, homeRegionId: regionId, @@ -477,55 +494,114 @@ describe('event service', () => { hsesUserId: faker.datatype.string(), email, lastLogin: new Date(), + name: `${faker.name.firstName()} ${faker.name.lastName()}`, }); + await db.Permission.create({ userId, regionId: 1, scopeId: SCOPES.READ_WRITE_TRAINING_REPORTS, }); - user = await db.User.findOne({ where: { id: userId } }); + + // collaborator + collaborator = await db.User.create({ + id: collaboratorId, + homeRegionId: regionId, + hsesUsername: faker.datatype.string(), + hsesUserId: faker.datatype.string(), + email: faker.internet.email(), + lastLogin: new Date(), + name: `${faker.name.firstName()} ${faker.name.lastName()}`, + }); + + await db.Permission.create({ + userId: collaboratorId, + regionId: 1, + scopeId: SCOPES.READ_WRITE_TRAINING_REPORTS, + }); + + // poc + poc = await db.User.create({ + id: pocId, + homeRegionId: regionId, + hsesUsername: faker.datatype.string(), + hsesUserId: faker.datatype.string(), + email: faker.internet.email(), + lastLogin: new Date(), + name: `${faker.name.firstName()} ${faker.name.lastName()}`, + }); + + await db.Permission.create({ + userId: pocId, + regionId: 1, + scopeId: SCOPES.POC_TRAINING_REPORTS, + }); + + // national centers + ncOne = await db.NationalCenter.create({ + name: faker.hacker.abbreviation(), + }); + + // owner is national center user 1 + await db.NationalCenterUser.create({ + userId, + nationalCenterId: ncOne.id, + }); + + ncTwo = await db.NationalCenter.create({ + name: faker.hacker.abbreviation(), + }); + + // collaborator is national center user 2 + await db.NationalCenterUser.create({ + userId: collaboratorId, + nationalCenterId: ncTwo.id, + }); + data = `${headings.join(',')} -${eventId},${editTitle},${istName},${email},${organizer},${duration},${reasons},${targetPopulation},${audience},${vision} -R01-TR-4234,bad_title,bad_istname,bad_email,bad_organizer,bad_duration,bad_reasons,bad_target,${audience},bad_vision`; +${email},${eventId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; buffer = Buffer.from(data); }); afterAll(async () => { - await db.User.destroy({ where: { id: userId } }); await db.EventReportPilot.destroy({ where: { ownerId: userId } }); - // await db.EventReportPilot.destroy({ where: { id: created.id } }); - await db.Permission.destroy({ where: { userId } }); + await db.NationalCenterUser.destroy({ + where: { userId: [userId, collaboratorId] }, + }); + await db.NationalCenter.destroy({ where: { id: [ncOne.id, ncTwo.id] } }); + await db.Permission.destroy({ where: { userId: [userId, collaboratorId, pocId] } }); + await db.User.destroy({ where: { id: [userId, collaboratorId, pocId] } }); }); it('imports good data correctly', async () => { const result = await csvImport(buffer); + expect(result.errors).toEqual([]); + expect(result.count).toEqual(1); + // eventId is now a field in the jsonb body of the "data" column on // db.EventReportPilot. // Let's make sure it exists. created = await db.EventReportPilot.findOne({ where: { 'data.eventId': eventId }, - raw: true, }); + expect(created).not.toBeNull(); + expect(created).toHaveProperty('ownerId', userId); expect(created).toHaveProperty('regionId', regionId); expect(created.data.reasons).toEqual(['Complaint', 'Planning/Coordination']); expect(created.data.vision).toEqual(vision); - expect(created.data.audience).toEqual(audience); + expect(created.data.eventIntendedAudience).toEqual(audience); expect(created.data.targetPopulations).toEqual(['Program Staff', 'Affected by Disaster']); - expect(created.data.eventOrganizer).toEqual(organizer); + expect(created.data.eventOrganizer).toEqual(typeOfEvent); expect(created.data.creator).toEqual(email); - expect(created.data.istName).toEqual(istName); - expect(created.data.eventName).toEqual(editTitle); - expect(created.data.eventDuration).toEqual(duration); - - expect(result.count).toEqual(1); - expect(result.errors).toEqual(['User bad_email does not exist']); + expect(created.data.eventName).toEqual(eventTitle); + expect(created.data.trainingType).toEqual(trainingType); const secondImport = `${headings.join(',')} -${eventId},bad_title,bad_istname,${email},bad_organizer,bad_duration,bad_reasons,bad_target,${audience},bad_vision`; +${email},${eventId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; // Subsequent import with event ID that already exists in the database // should skip importing this TR. 
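The second import above is expected to be skipped because checkEventExists (in src/services/event.ts, further down in this diff) throws when a report with the same data.eventId is already present. A minimal sketch of an equivalent guard using Sequelize's JSONB path syntax; this is an assumption for illustration, since the real implementation queries through a sequelize.literal subquery that the diff elides:

// Hypothetical equivalent of the duplicate-event guard.
// Assumes: import db from '../models';
const checkEventExists = async (eventId) => {
  const event = await db.EventReportPilot.findOne({
    attributes: ['id'],
    where: { 'data.eventId': eventId }, // JSONB path lookup, as used in the tests above
  });
  if (event) throw new Error(`Event ${eventId} already exists`);
};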
@@ -536,9 +612,12 @@ ${eventId},bad_title,bad_istname,${email},bad_organizer,bad_duration,bad_reasons it('gives an error if the user can\'t write in the region', async () => { await db.Permission.destroy({ where: { userId } }); - const result = await csvImport(buffer); + const d = `${headings.join(',')} +${email},R01-TR-3334,${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; + const b = Buffer.from(d); + const result = await csvImport(b); expect(result.count).toEqual(0); - expect(result.errors).toEqual([`User ${email} does not have permission to write in region ${regionId}`, 'User bad_email does not exist']); + expect(result.errors).toEqual([`User ${email} does not have permission to write in region ${regionId}`]); await db.Permission.create({ userId, regionId: 1, @@ -546,36 +625,56 @@ ${eventId},bad_title,bad_istname,${email},bad_organizer,bad_duration,bad_reasons }); }); + it('errors if the POC user lacks permissions', async () => { + await db.Permission.destroy({ where: { userId: pocId } }); + const d = `${headings.join(',')} +${email},R01-TR-3334,${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; + const b = Buffer.from(d); + const result = await csvImport(b); + expect(result.count).toEqual(0); + expect(result.errors).toEqual([`User ${poc.name} does not have POC permission in region ${regionId}`]); + await db.Permission.create({ + userId: pocId, + regionId: 1, + scopeId: SCOPES.POC_TRAINING_REPORTS, + }); + }); + + it('errors if the IST Collaborator user lacks permissions', async () => { + await db.Permission.destroy({ where: { userId: collaboratorId } }); + const d = `${headings.join(',')} +${email},R01-TR-3334,${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; + const b = Buffer.from(d); + const result = await csvImport(b); + expect(result.count).toEqual(0); + expect(result.errors).toEqual([`User ${collaborator.name} does not have permission to write in region ${regionId}`]); + await db.Permission.create({ + userId: collaboratorId, + regionId: 1, + scopeId: SCOPES.READ_WRITE_TRAINING_REPORTS, + }); + }); + it('skips rows that don\'t start with the correct prefix', async () => { - const reportId = 'R01-TR-5842'; const dataToTest = `${headings.join(',')} -${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,${audience},tr_vision -01-TR-4256,tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,${audience},tr_vision -R-TR-3426,tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,${audience},tr_vision`; - - eventIdsToDestroy.push(reportId); +${email},01-TR-4256,${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name}`; const bufferWithSkips = Buffer.from(dataToTest); const result = await csvImport(bufferWithSkips); - expect(result.count).toEqual(1); - expect(result.skipped.length).toEqual(2); + expect(result.skipped.length).toEqual(1); expect(result.skipped).toEqual( - ['Invalid "Event ID" format expected R##-TR-#### received 01-TR-4256', 'Invalid "Event ID" format expected R##-TR-#### received R-TR-3426'], + ['Invalid "Event ID" format expected R##-TR-#### received 01-TR-4256'], ); }); it('only imports valid columns ignores others', async () => { const mixedColumns = `${headings.join(',')},Extra Column`; const reportId = 
'R01-TR-3478'; - const dataToTest = `${mixedColumns} -${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,${audience},tr_vision,extra_data`; - - eventIdsToDestroy.push(reportId); - - const bufferWithSkips = Buffer.from(dataToTest); - - const result = await csvImport(bufferWithSkips); + const d = `${mixedColumns} +${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},${audience},${poc.name},JIBBER-JABBER`; + const b = Buffer.from(d); + const result = await csvImport(b); expect(result.count).toEqual(1); expect(result.skipped.length).toEqual(0); expect(result.errors.length).toEqual(0); @@ -585,26 +684,21 @@ ${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_ }); expect(importedEvent).not.toBeNull(); - // Assert 11 core fields, plus goal and goals[]. - expect(Object.keys(importedEvent.data).length).toEqual(12); // Assert data does not contain the extra column. expect(importedEvent.data).not.toHaveProperty('Extra Column'); }); it('only imports valid reasons ignores others', async () => { + const mixedColumns = `${headings.join(',')},Extra Column`; const reportId = 'R01-TR-9528'; const reasonsToTest = `"New Director or Management Complaint Planning/Coordination Invalid Reason"`; - const dataToTest = `${headings.join(',')} -${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,${reasonsToTest},tr_target,${audience},tr_vision`; - - eventIdsToDestroy.push(reportId); - - const bufferWithSkips = Buffer.from(dataToTest); - - const result = await csvImport(bufferWithSkips); + const d = `${mixedColumns} +${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasonsToTest},${vision},${targetPopulation},${audience},${poc.name},JIBBER-JABBER`; + const b = Buffer.from(d); + const result = await csvImport(b); expect(result.count).toEqual(1); expect(result.skipped.length).toEqual(0); expect(result.errors.length).toEqual(0); @@ -613,24 +707,20 @@ ${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,${reasonsToTes where: { 'data.eventId': reportId }, }); expect(importedEvent).not.toBeNull(); - - // Assert data.reasons contains only valid reasons. 
expect(importedEvent.data.reasons).toEqual(['New Director or Management', 'Complaint', 'Planning/Coordination']); }); it('only imports valid target populations ignores others', async () => { + const mixedColumns = `${headings.join(',')},Extra Column`; const reportId = 'R01-TR-6578'; const tgtPopToTest = `"Program Staff - Pregnant Women / Pregnant Persons - Invalid Pop"`; - const dataToTest = `${headings.join(',')} -${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,Complaint,${tgtPopToTest},${audience},tr_vision`; - - eventIdsToDestroy.push(reportId); + Pregnant Women / Pregnant Persons + Invalid Pop"`; - const bufferWithSkips = Buffer.from(dataToTest); - - const result = await csvImport(bufferWithSkips); + const d = `${mixedColumns} +${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${tgtPopToTest},${audience},${poc.name},JIBBER-JABBER`; + const b = Buffer.from(d); + const result = await csvImport(b); expect(result.count).toEqual(1); expect(result.skipped.length).toEqual(0); expect(result.errors.length).toEqual(0); @@ -639,36 +729,19 @@ ${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,Complaint,${tg where: { 'data.eventId': reportId }, }); expect(importedEvent).not.toBeNull(); - - // Assert data.reasons contains only valid reasons. expect(importedEvent.data.targetPopulations).toEqual(['Program Staff', 'Pregnant Women / Pregnant Persons']); }); it('skips rows that have an invalid audience', async () => { const reportId = 'R01-TR-5725'; - const dataToTest = `${headings.join(',')} -${reportId},tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,"Regional office/TTA",tr_vision -R01-TR-4658,tr_title,tr_istname,${email},tr_organizer,tr_duration,tr_reasons,tr_target,"Invalid Audience",tr_vision`; - - eventIdsToDestroy.push(reportId); - - const bufferWithSkips = Buffer.from(dataToTest); - - const result = await csvImport(bufferWithSkips); - expect(result.count).toEqual(1); + const mixedColumns = `${headings.join(',')},Extra Column`; + const d = `${mixedColumns} +${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},Invalid Audience,${poc.name},JIBBER-JABBER`; + const b = Buffer.from(d); + const result = await csvImport(b); + expect(result.count).toEqual(0); expect(result.skipped.length).toEqual(1); - expect(result.skipped).toEqual(['Value "Invalid Audience" is invalid for column "Audience". Must be of one of Recipients, Regional office/TTA: R01-TR-4658']); - - // Retrieve the imported event. - const importedEvent = await db.EventReportPilot.findOne({ - where: { 'data.eventId': reportId }, - }); - - // Assert the imported event is not null. - expect(importedEvent).not.toBeNull(); - - // Assert the imported event has the correct audience. - expect(importedEvent.data.audience).toEqual('Regional office/TTA'); + expect(result.skipped).toEqual(['Value "Invalid Audience" is invalid for column "Audience". 
Must be of one of Recipients, Regional office/TTA: R01-TR-5725']); }); }); diff --git a/src/services/event.ts b/src/services/event.ts index 3536ca4b98..9c271e5c20 100644 --- a/src/services/event.ts +++ b/src/services/event.ts @@ -6,6 +6,7 @@ import { TRAINING_REPORT_STATUSES as TRS, REASONS, TARGET_POPULATIONS, + EVENT_TARGET_POPULATIONS, EVENT_AUDIENCE, } from '@ttahub/common'; import { auditLogger } from '../logger'; @@ -24,6 +25,19 @@ const { EventReportPilotNationalCenterUser, } = db; +type UserWhereOptions = { + name?: { [Op.iLike]: string }; + email?: string; +}; + +type WhereOptions = { + id?: number; + ownerId?: number; + pocIds?: number; + collaboratorIds?: number[]; + regionId?: number; +}; + export const validateFields = (request, requiredFields) => { const missingFields = requiredFields.filter((field) => !request[field]); @@ -267,14 +281,6 @@ async function findEventHelperBlob({ return events || null; } -type WhereOptions = { - id?: number; - ownerId?: number; - pocIds?: number; - collaboratorIds?: number[]; - regionId?: number; -}; - /** * Updates an existing event in the database or creates a new one if it doesn't exist. * @param request An object containing all fields to be updated for the event. @@ -496,15 +502,17 @@ export async function findAllEvents(): Promise { const splitPipe = (str: string) => str.split('\n').map((s) => s.trim()).filter(Boolean); const mappings: Record = { - Audience: 'audience', - Creator: 'creator', - 'Edit Title': 'eventName', + Audience: 'eventIntendedAudience', + 'IST/Creator': 'creator', 'Event Title': 'eventName', - 'Event Duration/#NC Days of Support': 'eventDuration', - 'Event Duration/# NC Days of Support': 'eventDuration', + 'Event Duration': 'trainingType', + 'Event Duration/#NC Days of Support': 'trainingType', + 'Event Duration/# NC Days of Support': 'trainingType', 'Event ID': 'eventId', 'Overall Vision/Goal for the PD Event': 'vision', + 'Vision/Goal/Outcomes for the PD Event': 'vision', 'Reason for Activity': 'reasons', + 'Reason(s) for PD': 'reasons', 'Target Population(s)': 'targetPopulations', 'Event Organizer - Type of Event': 'eventOrganizer', 'IST Name:': 'istName', @@ -536,26 +544,63 @@ const mapLineToData = (line: Record) => { return data; }; -const checkUserExists = async (creator: string) => { +const checkUserExists = async (userWhere: UserWhereOptions) => { + const user = await db.User.findOne({ + where: userWhere, + include: [ + { + model: db.Permission, + as: 'permissions', + }, + { + model: db.NationalCenter, + as: 'nationalCenters', + }, + ], + }); + + if (!user) { + throw new Error(`User with ${ + Object.keys(userWhere).map((key) => `${key}: ${userWhere[key]}`).join(' AND ') + } does not exist`); + } + return user; +}; + +const checkUserExistsByNationalCenter = async (identifier: string) => { const user = await db.User.findOne({ - where: { email: creator }, + attributes: ['id', 'name'], include: [ { model: db.Permission, as: 'permissions', }, { + attributes: ['name'], model: db.NationalCenter, as: 'nationalCenters', + where: { + name: identifier, + }, + required: true, }, ], }); - if (!user) throw new Error(`User ${creator} does not exist`); + + if (!user) throw new Error(`User associated with National Center: ${identifier} does not exist`); return user; }; +const checkUserExistsByName = async (name: string) => checkUserExists({ + name: { + [Op.iLike]: name, + }, +}); +const checkUserExistsByEmail = async (email: string) => checkUserExists({ email }); + const checkEventExists = async (eventId: string) => { const event = 
await db.EventReportPilot.findOne({ + attributes: ['id'], where: { id: { [Op.in]: sequelize.literal( @@ -564,6 +609,7 @@ const checkEventExists = async (eventId: string) => { }, }, }); + if (event) throw new Error(`Event ${eventId} already exists`); }; @@ -581,11 +627,14 @@ export async function csvImport(buffer: Buffer) { const eventId = cleanLine['Event ID']; // If the eventId doesn't start with the prefix R and two numbers, it's invalid. - if (!eventId.match(/^R\d{2}/i)) { + const match = eventId.match(/^R\d{2}/i); + if (match === null) { skipped.push(`Invalid "Event ID" format expected R##-TR-#### received ${eventId}`); return false; } + await checkEventExists(eventId); + // Validate audience else skip. if (!EVENT_AUDIENCE.includes(cleanLine.Audience)) { skipped.push(`Value "${cleanLine.Audience}" is invalid for column "Audience". Must be of one of ${EVENT_AUDIENCE.join(', ')}: ${eventId}`); @@ -594,10 +643,15 @@ export async function csvImport(buffer: Buffer) { const regionId = Number(eventId.split('-')[0].replace(/\D/g, '').replace(/^0+/, '')); - const creator = cleanLine.Creator; + const creator = cleanLine['IST/Creator'] || cleanLine.Creator; + if (!creator) { + errors.push(`No creator listed on import for ${eventId}`); + return false; + } let owner; if (creator) { - owner = await checkUserExists(creator); + owner = await checkUserExistsByEmail(creator); + const policy = new EventReport(owner, { regionId, }); @@ -608,33 +662,70 @@ export async function csvImport(buffer: Buffer) { } } - await checkEventExists(eventId); + const collaborators = []; + const pocs = []; + + if (cleanLine['Designated Region POC for Event/Request']) { + const pocNames = cleanLine['Designated Region POC for Event/Request'].split('/').map((name) => name.trim()); + // eslint-disable-next-line no-restricted-syntax + for await (const pocName of pocNames) { + const poc = await checkUserExistsByName(pocName); + const policy = new EventReport(poc, { + regionId, + }); + + if (!policy.hasPocInRegion()) { + errors.push(`User ${pocName} does not have POC permission in region ${regionId}`); + return false; + } + pocs.push(poc.id); + } + } - const data = mapLineToData(cleanLine); + if (cleanLine['National Centers']) { + const nationalCenterNames = cleanLine['National Centers'].split('\n').map((name) => name.trim()); + // eslint-disable-next-line no-restricted-syntax + for await (const center of nationalCenterNames) { + const collaborator = await checkUserExistsByNationalCenter(center); + const policy = new EventReport(collaborator, { + regionId, + }); + + if (!policy.canWriteInRegion()) { + errors.push(`User ${collaborator.name} does not have permission to write in region ${regionId}`); + return false; + } + collaborators.push(collaborator.id); + } + } + + if (!collaborators.length) { + errors.push(`No collaborators found for ${eventId}`); + return false; + } - data.goals = []; // shape: { grantId: number, goalId: number, sessionId: number }[] - data.goal = ''; + const data = mapLineToData(cleanLine); // Reasons, remove duplicates and invalid values. - data.reasons = [...new Set(data.reasons as string[])]; - data.reasons = (data.reasons as string[]).filter((reason) => REASONS.includes(reason)); + data.reasons = [...new Set(data.reasons as string[])].filter((reason) => REASONS.includes(reason)); // Target Populations, remove duplicates and invalid values. 
- data.targetPopulations = [...new Set(data.targetPopulations as string[])]; - data.targetPopulations = (data.targetPopulations as string[]).filter((target) => TARGET_POPULATIONS.includes(target)); + data.targetPopulations = [...new Set(data.targetPopulations as string[])].filter((target) => [...TARGET_POPULATIONS, ...EVENT_TARGET_POPULATIONS].includes(target)); await db.EventReportPilot.create({ - collaboratorIds: [], + collaboratorIds: collaborators, ownerId: owner.id, regionId, + pocIds: pocs, data: sequelize.cast(JSON.stringify(data), 'jsonb'), imported: sequelize.cast(JSON.stringify(cleanLine), 'jsonb'), }); return true; } catch (error) { - if (error.message.startsWith('User')) { - errors.push(error.message); + const message = (error.message || '').replace(/\/t/g, ''); + if (message.startsWith('User')) { + errors.push(message); } else if (error.message.startsWith('Event')) { skipped.push(line['Event ID']); } diff --git a/yarn-audit-known-issues b/yarn-audit-known-issues index 49bae093ea..83ed067419 100644 --- a/yarn-audit-known-issues +++ b/yarn-audit-known-issues @@ -1,4 +1,5 @@ {"type":"auditAdvisory","data":{"resolution":{"id":1096366,"path":"email-templates>preview-email>mailparser>nodemailer","dev":false,"optional":false,"bundled":false},"advisory":{"findings":[{"version":"6.7.3","paths":["email-templates>preview-email>mailparser>nodemailer"]}],"metadata":null,"vulnerable_versions":"<=6.9.8","module_name":"nodemailer","severity":"moderate","github_advisory_id":"GHSA-9h6g-pr28-7cqp","cves":[],"access":"public","patched_versions":">=6.9.9","cvss":{"score":5.3,"vectorString":"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L"},"updated":"2024-02-01T17:58:50.000Z","recommendation":"Upgrade to version 6.9.9 or later","cwe":["CWE-1333"],"found_by":null,"deleted":null,"id":1096366,"references":"- https://github.com/nodemailer/nodemailer/security/advisories/GHSA-9h6g-pr28-7cqp\n- https://gist.github.com/francoatmega/890dd5053375333e40c6fdbcc8c58df6\n- https://gist.github.com/francoatmega/9aab042b0b24968d7b7039818e8b2698\n- https://github.com/nodemailer/nodemailer/commit/dd8f5e8a4ddc99992e31df76bcff9c590035cd4a\n- https://github.com/advisories/GHSA-9h6g-pr28-7cqp","created":"2024-01-31T22:42:54.000Z","reported_by":null,"title":"nodemailer ReDoS when trying to send a specially crafted email","npm_advisory_id":null,"overview":"### Summary\nA ReDoS vulnerability occurs when nodemailer tries to parse img files with the parameter `attachDataUrls` set, causing the stuck of event loop. \nAnother flaw was found when nodemailer tries to parse an attachments with a embedded file, causing the stuck of event loop. 
\n\n### Details\n\nRegex: /^data:((?:[^;]*;)*(?:[^,]*)),(.*)$/\n\nPath: compile -> getAttachments -> _processDataUrl\n\nRegex: /(]* src\\s*=[\\s\"']*)(data:([^;]+);[^\"'>\\s]+)/\n\nPath: _convertDataImages\n\n### PoC\n\nhttps://gist.github.com/francoatmega/890dd5053375333e40c6fdbcc8c58df6\nhttps://gist.github.com/francoatmega/9aab042b0b24968d7b7039818e8b2698\n\n### Impact\n\nReDoS causes the event loop to stuck a specially crafted evil email can cause this problem.\n","url":"https://github.com/advisories/GHSA-9h6g-pr28-7cqp"}}} +{"type":"auditAdvisory","data":{"resolution":{"id":1098307,"path":"newrelic>@newrelic/security-agent>@aws-sdk/client-lambda>@aws-sdk/client-sts>fast-xml-parser","dev":false,"optional":false,"bundled":false},"advisory":{"findings":[{"version":"4.2.5","paths":["newrelic>@newrelic/security-agent>@aws-sdk/client-lambda>@aws-sdk/client-sts>fast-xml-parser"]}],"metadata":null,"vulnerable_versions":"<4.4.1","module_name":"fast-xml-parser","severity":"high","github_advisory_id":"GHSA-mpg4-rc92-vx8v","cves":["CVE-2024-41818"],"access":"public","patched_versions":">=4.4.1","cvss":{"score":7.5,"vectorString":"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H"},"updated":"2024-07-29T19:47:38.000Z","recommendation":"Upgrade to version 4.4.1 or later","cwe":["CWE-400"],"found_by":null,"deleted":null,"id":1098307,"references":"- https://github.com/NaturalIntelligence/fast-xml-parser/security/advisories/GHSA-mpg4-rc92-vx8v\n- https://github.com/NaturalIntelligence/fast-xml-parser/commit/d0bfe8a3a2813a185f39591bbef222212d856164\n- https://github.com/NaturalIntelligence/fast-xml-parser/blob/master/src/v5/valueParsers/currency.js#L10\n- https://nvd.nist.gov/vuln/detail/CVE-2024-41818\n- https://github.com/advisories/GHSA-mpg4-rc92-vx8v","created":"2024-07-29T17:46:16.000Z","reported_by":null,"title":"fast-xml-parser vulnerable to ReDOS at currency parsing","npm_advisory_id":null,"overview":"### Summary\nA ReDOS exists on currency.js was discovered by Gauss Security Labs R&D team.\n\n### Details\nhttps://github.com/NaturalIntelligence/fast-xml-parser/blob/master/src/v5/valueParsers/currency.js#L10\ncontains a vulnerable regex \n\n### PoC\npass the following string '\\t'.repeat(13337) + '.'\n\n### Impact\nDenial of service during currency parsing in experimental version 5 of fast-xml-parser-library\n\nhttps://gauss-security.com","url":"https://github.com/advisories/GHSA-mpg4-rc92-vx8v"}}} {"type":"auditAdvisory","data":{"resolution":{"id":1096410,"path":"xml2json>hoek","dev":false,"bundled":false,"optional":false},"advisory":{"findings":[{"version":"4.2.1","paths":["xml2json>hoek"]},{"version":"5.0.4","paths":["xml2json>joi>hoek"]},{"version":"6.1.3","paths":["xml2json>joi>topo>hoek"]}],"metadata":null,"vulnerable_versions":"<=6.1.3","module_name":"hoek","severity":"high","github_advisory_id":"GHSA-c429-5p7v-vgjp","cves":["CVE-2020-36604"],"access":"public","patched_versions":"<0.0.0","cvss":{"score":8.1,"vectorString":"CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H"},"updated":"2024-02-07T18:59:37.000Z","recommendation":"None","cwe":["CWE-1321"],"found_by":null,"deleted":null,"id":1096410,"references":"- https://nvd.nist.gov/vuln/detail/CVE-2020-36604\n- https://github.com/hapijs/hoek/issues/352\n- https://github.com/hapijs/hoek/commit/4d0804bc6135ad72afdc5e1ec002b935b2f5216a\n- https://github.com/hapijs/hoek/commit/948baf98634a5c206875b67d11368f133034fa90\n- https://github.com/advisories/GHSA-c429-5p7v-vgjp","created":"2022-09-25T00:00:27.000Z","reported_by":null,"title":"hoek subject to 
prototype pollution via the clone function.","npm_advisory_id":null,"overview":"hoek versions prior to 8.5.1, and 9.x prior to 9.0.3 are vulnerable to prototype pollution in the clone function. If an object with the __proto__ key is passed to clone() the key is converted to a prototype. This issue has been patched in version 9.0.3, and backported to 8.5.1. ","url":"https://github.com/advisories/GHSA-c429-5p7v-vgjp"}}} {"type":"auditAdvisory","data":{"resolution":{"id":1096410,"path":"xml2json>joi>hoek","dev":false,"bundled":false,"optional":false},"advisory":{"findings":[{"version":"4.2.1","paths":["xml2json>hoek"]},{"version":"5.0.4","paths":["xml2json>joi>hoek"]},{"version":"6.1.3","paths":["xml2json>joi>topo>hoek"]}],"metadata":null,"vulnerable_versions":"<=6.1.3","module_name":"hoek","severity":"high","github_advisory_id":"GHSA-c429-5p7v-vgjp","cves":["CVE-2020-36604"],"access":"public","patched_versions":"<0.0.0","cvss":{"score":8.1,"vectorString":"CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H"},"updated":"2024-02-07T18:59:37.000Z","recommendation":"None","cwe":["CWE-1321"],"found_by":null,"deleted":null,"id":1096410,"references":"- https://nvd.nist.gov/vuln/detail/CVE-2020-36604\n- https://github.com/hapijs/hoek/issues/352\n- https://github.com/hapijs/hoek/commit/4d0804bc6135ad72afdc5e1ec002b935b2f5216a\n- https://github.com/hapijs/hoek/commit/948baf98634a5c206875b67d11368f133034fa90\n- https://github.com/advisories/GHSA-c429-5p7v-vgjp","created":"2022-09-25T00:00:27.000Z","reported_by":null,"title":"hoek subject to prototype pollution via the clone function.","npm_advisory_id":null,"overview":"hoek versions prior to 8.5.1, and 9.x prior to 9.0.3 are vulnerable to prototype pollution in the clone function. If an object with the __proto__ key is passed to clone() the key is converted to a prototype. This issue has been patched in version 9.0.3, and backported to 8.5.1. ","url":"https://github.com/advisories/GHSA-c429-5p7v-vgjp"}}} {"type":"auditAdvisory","data":{"resolution":{"id":1096410,"path":"xml2json>joi>topo>hoek","dev":false,"bundled":false,"optional":false},"advisory":{"findings":[{"version":"4.2.1","paths":["xml2json>hoek"]},{"version":"5.0.4","paths":["xml2json>joi>hoek"]},{"version":"6.1.3","paths":["xml2json>joi>topo>hoek"]}],"metadata":null,"vulnerable_versions":"<=6.1.3","module_name":"hoek","severity":"high","github_advisory_id":"GHSA-c429-5p7v-vgjp","cves":["CVE-2020-36604"],"access":"public","patched_versions":"<0.0.0","cvss":{"score":8.1,"vectorString":"CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H"},"updated":"2024-02-07T18:59:37.000Z","recommendation":"None","cwe":["CWE-1321"],"found_by":null,"deleted":null,"id":1096410,"references":"- https://nvd.nist.gov/vuln/detail/CVE-2020-36604\n- https://github.com/hapijs/hoek/issues/352\n- https://github.com/hapijs/hoek/commit/4d0804bc6135ad72afdc5e1ec002b935b2f5216a\n- https://github.com/hapijs/hoek/commit/948baf98634a5c206875b67d11368f133034fa90\n- https://github.com/advisories/GHSA-c429-5p7v-vgjp","created":"2022-09-25T00:00:27.000Z","reported_by":null,"title":"hoek subject to prototype pollution via the clone function.","npm_advisory_id":null,"overview":"hoek versions prior to 8.5.1, and 9.x prior to 9.0.3 are vulnerable to prototype pollution in the clone function. If an object with the __proto__ key is passed to clone() the key is converted to a prototype. This issue has been patched in version 9.0.3, and backported to 8.5.1. ","url":"https://github.com/advisories/GHSA-c429-5p7v-vgjp"}}}
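Both advisories appended above follow this file's convention of one yarn audit JSON advisory per line. A common way a known-issues file like this is consumed is to diff it against fresh audit output and fail CI only on advisories that are not yet recorded; the sketch below is an assumption for illustration (audit-output.json and the comparison script itself are not part of this diff):

// Hypothetical check: fail only on advisories missing from yarn-audit-known-issues.
const fs = require('fs');

const readLines = (path) => fs.readFileSync(path, 'utf8').split('\n').filter(Boolean);

const known = new Set(readLines('yarn-audit-known-issues'));
const fresh = readLines('audit-output.json') // e.g. captured from `yarn audit --json`
  .filter((line) => line.includes('"auditAdvisory"'));

const unexpected = fresh.filter((line) => !known.has(line));
if (unexpected.length > 0) {
  console.error(`Found ${unexpected.length} new audit advisories:`);
  unexpected.forEach((line) => console.error(line));
  process.exit(1);
}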