diff --git a/CHANGELOG.md b/CHANGELOG.md index a63165647d..d04991c3a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Change Log +## [3.5.1](https://github.com/TheHive-Project/TheHive/milestone/65) (2021-03-01) + +**Fixed bugs:** + +- [Bug] Init Script Syntax Error in Bash Switch-Case [\#1646](https://github.com/TheHive-Project/TheHive/issues/1646) +- [Bug] Issues with case attachments section [\#1651](https://github.com/TheHive-Project/TheHive/issues/1651) +- [Bug] Fix the severity component [\#1654](https://github.com/TheHive-Project/TheHive/issues/1654) +- [Bug] Display problem TH [\#1688](https://github.com/TheHive-Project/TheHive/issues/1688) +- [Bug] Update doesn't work on Elasticsearch 7.11 [\#1799](https://github.com/TheHive-Project/TheHive/issues/1799) + +## [3.4.4](https://github.com/TheHive-Project/TheHive/milestone/63) (2020-10-30) + +**Fixed bugs:** + +- [Security] Update Playframework [\#1604](https://github.com/TheHive-Project/TheHive/issues/1604) + ## [3.5.0](https://github.com/TheHive-Project/TheHive/milestone/61) (2020-10-29) **Implemented enhancements:** diff --git a/build.sbt b/build.sbt index 992f41566f..02259350e8 100644 --- a/build.sbt +++ b/build.sbt @@ -56,7 +56,6 @@ lazy val thehiveCortex = (project in file("thehive-cortex")) lazy val thehive = (project in file(".")) .enablePlugins(PlayScala /*, PlayAkkaHttp2Support*/ ) - .enablePlugins(Bintray) .dependsOn(thehiveBackend, thehiveMisp, thehiveCortex) .aggregate(thehiveBackend, thehiveMisp, thehiveCortex) .settings(projectSettings) @@ -85,31 +84,14 @@ lazy val rpmPackageRelease = (project in file("package/rpm-release")) |GPG key as well as configuration for yum.""".stripMargin, linuxPackageMappings in Rpm := Seq( packageMapping( - file("PGP-PUBLIC-KEY") → "etc/pki/rpm-gpg/GPG-TheHive-Project", - file("package/rpm-release/thehive-rpm.repo") → "/etc/yum.repos.d/thehive-rpm.repo", - file("LICENSE") → "/usr/share/doc/thehive-project-release/LICENSE" + file("PGP-PUBLIC-KEY") -> "etc/pki/rpm-gpg/GPG-TheHive-Project", + file("package/rpm-release/thehive-rpm.repo") -> "/etc/yum.repos.d/thehive-rpm.repo", + file("LICENSE") -> "/usr/share/doc/thehive-project-release/LICENSE" ) ) ) -rpmReleaseFile := { - import scala.sys.process._ - val rpmFile = (packageBin in Rpm in rpmPackageRelease).value - Process( - "rpm" :: - "--define" :: "_gpg_name TheHive Project" :: - "--define" :: "_signature gpg" :: - "--define" :: "__gpg_check_password_cmd /bin/true" :: - "--define" :: "__gpg_sign_cmd %{__gpg} gpg --batch --no-verbose --no-armor --use-agent --no-secmem-warning -u \"%{_gpg_name}\" -sbo %{__signature_filename} %{__plaintext_filename}" :: - "--addsign" :: rpmFile.toString :: - Nil - ).!! 
- rpmFile -} - -milestoneFilter := ((milestone: Milestone) ⇒ milestone.title.head < '4') - -bintrayOrganization := Some("thehive-project") +milestoneFilter := ((milestone: Milestone) => milestone.title.head < '4') // Front-end // run := { @@ -123,11 +105,3 @@ packageBin := { (packageBin in Debian).value (packageBin in Rpm).value } - -publish := { - (publish in Docker).value - publishRelease.value - publishLatest.value - publishRpm.value - publishDebian.value -} diff --git a/conf/application.sample b/conf/application.sample index 547a1273c6..d8eaf067c4 100644 --- a/conf/application.sample +++ b/conf/application.sample @@ -245,3 +245,14 @@ misp { # purpose = ImportAndExport #} ## <-- Uncomment to complete the configuration } + +misp-thread-pool { + fork-join-executor { + # Min number of threads available for MISP synchronization + parallelism-min = 2 + # Parallelism (threads) ... ceil(available processors * factor) + parallelism-factor = 2.0 + # Max number of threads available for MISP synchronization + parallelism-max = 4 + } +} diff --git a/package/debian/postinst b/package/debian/postinst index 66176644d1..d3612a7a07 100755 --- a/package/debian/postinst +++ b/package/debian/postinst @@ -82,8 +82,8 @@ case "$1" in addUser thehive "" thehive "thehive daemon-user" "/bin/false" # Chown definitions created by SBT Native Packager - - chown thehive:thehive /var/log/thehive + touch /var/log/thehive/application.log + chown -R thehive:thehive /var/log/thehive chown root:thehive /etc/thehive/application.conf /etc/thehive/logback.xml chmod 0640 /etc/thehive/application.conf /etc/thehive/logback.xml mkdir -p /opt/thehive/conf diff --git a/package/docker/Dockerfile b/package/docker/Dockerfile index 6c25d72119..e9d948976d 100644 --- a/package/docker/Dockerfile +++ b/package/docker/Dockerfile @@ -55,6 +55,7 @@ RUN apt update && \ mkdir /etc/thehive && \ cp /opt/thehive/conf/logback.xml /etc/thehive/logback.xml && \ chown -R root:root /opt/thehive && \ + touch /var/log/thehive/application.log && \ chown -R thehive:thehive /var/log/thehive /etc/thehive && \ chmod +x /opt/thehive/entrypoint diff --git a/package/thehive b/package/thehive index 021c8f859e..c553cd47c5 100755 --- a/package/thehive +++ b/package/thehive @@ -42,10 +42,13 @@ check_requirements() { INIT=$(readlink /proc/1/exe 2> /dev/null) case ${INIT} in *systemd*) - echo echo "Your are using systemd. This script should not be used" + echo "Your are using systemd. This script should not be used" + return 1 + ;; *upstart*) - echo echo "Your are using upstart. This script should not be used" - return 1;; + echo "Your are using upstart. 
This script should not be used" + return 1 + ;; esac return 0 } diff --git a/project/Bintray.scala b/project/Bintray.scala deleted file mode 100644 index 9fabdd7784..0000000000 --- a/project/Bintray.scala +++ /dev/null @@ -1,180 +0,0 @@ -import scala.concurrent.Await -import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.duration.Duration - -import bintray.BintrayCredentials -import bintray.BintrayKeys.{ bintrayEnsureCredentials, bintrayOrganization, bintrayPackage } -import bintry.Client -import com.typesafe.sbt.packager.Keys._ -import com.typesafe.sbt.packager.debian.DebianPlugin.autoImport.Debian -import com.typesafe.sbt.packager.rpm.RpmPlugin.autoImport.Rpm -import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal -import dispatch.{ FunctionHandler, Http } -import sbt.Keys._ -import sbt._ - -object Bintray extends AutoPlugin { - - object autoImport { - val publishRelease: TaskKey[Unit] = taskKey[Unit]("Publish binary in bintray") - val publishLatest: TaskKey[Unit] = taskKey[Unit]("Publish latest binary in bintray") - val publishDebian: TaskKey[Unit] = taskKey[Unit]("publish debian package in Bintray") - val publishRpm: TaskKey[Unit] = taskKey[Unit]("publish rpm package in Bintray") - val rpmReleaseFile = taskKey[File]("The rpm release package file") - val publishRpmRelease = taskKey[Unit]("publish rpm release package in Bintray") - } - - import autoImport._ - - override lazy val projectSettings = Seq( - - publishRelease in ThisBuild := { - val file = (packageBin in Universal).value - btPublish(file.getName, - file, - bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - (version in ThisBuild).value, - sLog.value) - }, - - publishLatest in ThisBuild := Def.taskDyn { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (packageBin in Universal).value - val latestVersion = if (version.value.contains('-')) "latest-beta" else "latest" - val latestName = file.getName.replace(version.value, latestVersion) - if (latestName == file.getName) - Def.task { - sLog.value.warn(s"Latest package name can't be built using package name [$latestName], publish aborted") - } - else Def.task { - removeVersion(bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - latestVersion, - sLog.value) - btPublish(latestName, - file, - bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - latestVersion, - sLog.value) - } - } - .value, - - publishDebian in ThisBuild := { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (debianSign in Debian).value - val bintrayCredentials = bintrayEnsureCredentials.value - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "debian-beta", - bintrayPackage.value, - version.value, - sLog.value, - "deb_distribution" → "any", - "deb_component" → "main", - "deb_architecture" → "all" - ) - if (!version.value.contains('-')) - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "debian-stable", - bintrayPackage.value, - version.value, - sLog.value, - "deb_distribution" → "any", - "deb_component" → "main", - "deb_architecture" → "all" - ) - }, - - publishRpm in ThisBuild := { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (packageBin in 
Rpm).value - val bintrayCredentials = bintrayEnsureCredentials.value - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "rpm-beta", - bintrayPackage.value, - (version in Rpm).value + '-' + (rpmRelease in Rpm).value, - sLog.value) - if (!version.value.contains('-')) - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "rpm-stable", - bintrayPackage.value, - (version in Rpm).value + '-' + (rpmRelease in Rpm).value, - sLog.value) - }, - - publishRpmRelease in ThisBuild := { - val file = rpmReleaseFile.value - btPublish(file.getName, - file, - bintrayEnsureCredentials.value, - bintrayOrganization.value, - "rpm-stable", - "thehive-project-release", - "1.1.0", - sLog.value) - } - ) - - private def asStatusAndBody = new FunctionHandler({ r ⇒ (r.getStatusCode, r.getResponseBody) }) - - def removeVersion(credential: BintrayCredentials, - org: Option[String], - repoName: String, - packageName: String, - version: String, - log: Logger): Unit = { - val BintrayCredentials(user, key) = credential - val client: Client = Client(user, key, new Http()) - val repo: Client#Repo = client.repo(org.getOrElse(user), repoName) - Await.result(repo.get(packageName).version(version).delete(asStatusAndBody), Duration.Inf) match { - case (status, body) ⇒ log.info(s"Delete version $packageName $version: $status ($body)") - } - } - - private def btPublish(filename: String, - file: File, - credential: BintrayCredentials, - org: Option[String], - repoName: String, - packageName: String, - version: String, - log: Logger, - additionalParams: (String, String)*): Unit = { - val BintrayCredentials(user, key) = credential - val owner: String = org.getOrElse(user) - val client: Client = Client(user, key, new Http()) - val repo: Client#Repo = client.repo(org.getOrElse(user), repoName) - - - val params = additionalParams - .map { case (k, v) ⇒ s"$k=$v" } - .mkString(";", ";", "") - val upload = repo.get(packageName).version(version).upload(filename + params, file) - - log.info(s"Uploading $file ... 
(${org.getOrElse(user)}/$repoName/$packageName/$version/$filename$params)") - Await.result(upload(asStatusAndBody), Duration.Inf) match { - case (201, _) ⇒ log.info(s"$file was uploaded to $owner/$packageName@$version") - case (_, fail) ⇒ sys.error(s"failed to upload $file to $owner/$packageName@$version: $fail") - } - } -} \ No newline at end of file diff --git a/project/Dependencies.scala b/project/Dependencies.scala index e3dcc90901..b49eb3add3 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -19,7 +19,7 @@ object Dependencies { val reflections = "org.reflections" % "reflections" % "0.9.11" val zip4j = "net.lingala.zip4j" % "zip4j" % "2.6.0" - val elastic4play = "org.thehive-project" %% "elastic4play" % "1.12.3" + val elastic4play = "org.thehive-project" %% "elastic4play" % "1.13.1" val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % play.core.PlayVersion.akkaVersion val akkaClusterTyped = "com.typesafe.akka" %% "akka-cluster-typed" % play.core.PlayVersion.akkaVersion val akkaClusterTools = "com.typesafe.akka" %% "akka-cluster-tools" % play.core.PlayVersion.akkaVersion diff --git a/project/plugins.sbt b/project/plugins.sbt index 85eabd9ba5..3558e62509 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,6 +2,5 @@ logLevel := Level.Info addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.3") -addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") addSbtPlugin("org.thehive-project" % "sbt-github-changelog" % "0.3.0") diff --git a/sbt b/sbt index 42cf87bcf9..375b5bee66 100755 --- a/sbt +++ b/sbt @@ -3,14 +3,42 @@ # A more capable sbt runner, coincidentally also called sbt. # Author: Paul Phillips # https://github.com/paulp/sbt-extras +# +# Generated from http://www.opensource.org/licenses/bsd-license.php +# Copyright (c) 2011, Paul Phillips. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the author nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
set -o pipefail -declare -r sbt_release_version="1.2.8" -declare -r sbt_unreleased_version="1.3.0-RC1" +declare -r sbt_release_version="1.4.7" +declare -r sbt_unreleased_version="1.4.7" -declare -r latest_213="2.13.0-RC3" -declare -r latest_212="2.12.8" +declare -r latest_213="2.13.4" +declare -r latest_212="2.12.12" declare -r latest_211="2.11.12" declare -r latest_210="2.10.7" declare -r latest_29="2.9.3" @@ -18,18 +46,17 @@ declare -r latest_28="2.8.2" declare -r buildProps="project/build.properties" -declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases" +declare -r sbt_launch_ivy_release_repo="https://repo.typesafe.com/typesafe/ivy-releases" declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" -declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases" -declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots" +declare -r sbt_launch_mvn_release_repo="https://repo1.maven.org/maven2" +declare -r sbt_launch_mvn_snapshot_repo="https://repo.scala-sbt.org/scalasbt/maven-snapshots" -declare -r default_jvm_opts_common="-Xms512m -Xss2m" -declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" +declare -r default_jvm_opts_common="-Xms512m -Xss2m -XX:MaxInlineLevel=18" +declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy -Dsbt.coursier.home=project/.coursier" declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new declare sbt_explicit_version declare verbose noshare batch trace_level -declare debugUs declare java_cmd="java" declare sbt_launch_dir="$HOME/.sbt/launchers" @@ -41,11 +68,14 @@ declare -a java_args scalac_args sbt_commands residual_args # args to jvm/sbt via files or environment variables declare -a extra_jvm_opts extra_sbt_opts -echoerr () { echo >&2 "$@"; } -vlog () { [[ -n "$verbose" ]] && echoerr "$@"; } -die () { echo "Aborting: $*" ; exit 1; } +echoerr() { echo >&2 "$@"; } +vlog() { [[ -n "$verbose" ]] && echoerr "$@"; } +die() { + echo "Aborting: $*" + exit 1 +} -setTrapExit () { +setTrapExit() { # save stty and trap exit, to ensure echo is re-enabled if we are interrupted. SBT_STTY="$(stty -g 2>/dev/null)" export SBT_STTY @@ -64,9 +94,12 @@ setTrapExit () { # this seems to cover the bases on OSX, and someone will # have to tell me about the others. -get_script_path () { +get_script_path() { local path="$1" - [[ -L "$path" ]] || { echo "$path" ; return; } + [[ -L "$path" ]] || { + echo "$path" + return + } local -r target="$(readlink "$path")" if [[ "${target:0:1}" == "/" ]]; then @@ -81,7 +114,7 @@ declare -r script_path script_name="${script_path##*/}" declare -r script_name -init_default_option_file () { +init_default_option_file() { local overriding_var="${!1}" local default_file="$2" if [[ ! 
-r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then @@ -94,66 +127,81 @@ init_default_option_file () { } sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" +sbtx_opts_file="$(init_default_option_file SBTX_OPTS .sbtxopts)" jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" -build_props_sbt () { - [[ -r "$buildProps" ]] && \ +build_props_sbt() { + [[ -r "$buildProps" ]] && grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' } -set_sbt_version () { +set_sbt_version() { sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version export sbt_version } -url_base () { +url_base() { local version="$1" case "$version" in - 0.7.*) echo "http://simple-build-tool.googlecode.com" ;; - 0.10.* ) echo "$sbt_launch_ivy_release_repo" ;; + 0.7.*) echo "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/simple-build-tool" ;; + 0.10.*) echo "$sbt_launch_ivy_release_repo" ;; 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_ivy_snapshot_repo" ;; - 0.*) echo "$sbt_launch_ivy_release_repo" ;; - *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_mvn_snapshot_repo" ;; - *) echo "$sbt_launch_mvn_release_repo" ;; + echo "$sbt_launch_ivy_snapshot_repo" ;; + 0.*) echo "$sbt_launch_ivy_release_repo" ;; + *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]T[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmddThhMMss" + echo "$sbt_launch_mvn_snapshot_repo" ;; + *) echo "$sbt_launch_mvn_release_repo" ;; esac } -make_url () { +make_url() { local version="$1" local base="${sbt_launch_repo:-$(url_base "$version")}" case "$version" in - 0.7.*) echo "$base/files/sbt-launch-0.7.7.jar" ;; - 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.7.*) echo "$base/sbt-launch-0.7.7.jar" ;; + 0.10.*) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; - 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; - *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; esac } -addJava () { vlog "[addJava] arg = '$1'" ; java_args+=("$1"); } -addSbt () { vlog "[addSbt] arg = '$1'" ; sbt_commands+=("$1"); } -addScalac () { vlog "[addScalac] arg = '$1'" ; scalac_args+=("$1"); } -addResidual () { vlog "[residual] arg = '$1'" ; residual_args+=("$1"); } +addJava() { + vlog "[addJava] arg = '$1'" + java_args+=("$1") +} +addSbt() { + vlog "[addSbt] arg = '$1'" + sbt_commands+=("$1") +} +addScalac() { + vlog "[addScalac] arg = '$1'" + scalac_args+=("$1") +} +addResidual() { + vlog "[residual] arg = '$1'" + residual_args+=("$1") +} -addResolver () { addSbt "set resolvers += $1"; } -addDebugger () { addJava "-Xdebug" ; addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } -setThisBuild () { +addResolver() { addSbt "set resolvers += $1"; } + +addDebugger() { addJava "-Xdebug" && addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } + +setThisBuild() { vlog "[addBuild] args = '$*'" local key="$1" && shift addSbt "set $key in ThisBuild := $*" } -setScalaVersion () { +setScalaVersion() { [[ "$1" == *"-SNAPSHOT" ]] && 
addResolver 'Resolver.sonatypeRepo("snapshots")' addSbt "++ $1" } -setJavaHome () { +setJavaHome() { java_cmd="$1/bin/java" setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" export JAVA_HOME="$1" @@ -166,9 +214,9 @@ getJavaVersion() { # java -version on java8 says 1.8.x # but on 9 and 10 it's 9.x.y and 10.x.y. - if [[ "$str" =~ ^1\.([0-9]+)\..*$ ]]; then + if [[ "$str" =~ ^1\.([0-9]+)(\..*)?$ ]]; then echo "${BASH_REMATCH[1]}" - elif [[ "$str" =~ ^([0-9]+)\..*$ ]]; then + elif [[ "$str" =~ ^([0-9]+)(\..*)?$ ]]; then echo "${BASH_REMATCH[1]}" elif [[ -n "$str" ]]; then echoerr "Can't parse java version from: $str" @@ -178,8 +226,8 @@ getJavaVersion() { checkJava() { # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME - [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" - [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" + [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" + [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" if [[ -n "$java" ]]; then pathJavaVersion=$(getJavaVersion java) @@ -193,31 +241,32 @@ checkJava() { fi } -java_version () { +java_version() { local -r version=$(getJavaVersion "$java_cmd") vlog "Detected Java version: $version" echo "$version" } +is_apple_silicon() { [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; } + # MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ -default_jvm_opts () { +default_jvm_opts() { local -r v="$(java_version)" - if [[ $v -ge 8 ]]; then + if [[ $v -ge 10 ]]; then + if is_apple_silicon; then + # As of Dec 2020, JVM for Apple Silicon (M1) doesn't support JVMCI + echo "$default_jvm_opts_common" + else + echo "$default_jvm_opts_common -XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler" + fi + elif [[ $v -ge 8 ]]; then echo "$default_jvm_opts_common" else echo "-XX:MaxPermSize=384m $default_jvm_opts_common" fi } -build_props_scala () { - if [[ -r "$buildProps" ]]; then - versionLine="$(grep '^build.scala.versions' "$buildProps")" - versionString="${versionLine##build.scala.versions=}" - echo "${versionString%% .*}" - fi -} - -execRunner () { +execRunner() { # print the arguments one to a line, quoting any containing spaces vlog "# Executing command line:" && { for arg; do @@ -235,40 +284,36 @@ execRunner () { setTrapExit if [[ -n "$batch" ]]; then - "$@" < /dev/null + "$@" /dev/null 2>&1; then + if command -v curl >/dev/null 2>&1; then curl --fail --silent --location "$url" --output "$jar" - elif command -v wget > /dev/null 2>&1; then + elif command -v wget >/dev/null 2>&1; then wget -q -O "$jar" "$url" fi } && [[ -r "$jar" ]] } -acquire_sbt_jar () { +acquire_sbt_jar() { { sbt_jar="$(jar_file "$sbt_version")" [[ -r "$sbt_jar" ]] @@ -277,11 +322,66 @@ acquire_sbt_jar () { [[ -r "$sbt_jar" ]] } || { sbt_jar="$(jar_file "$sbt_version")" - download_url "$(make_url "$sbt_version")" "$sbt_jar" + jar_url="$(make_url "$sbt_version")" + + echoerr "Downloading sbt launcher for ${sbt_version}:" + echoerr " From ${jar_url}" + echoerr " To ${sbt_jar}" + + download_url "${jar_url}" "${sbt_jar}" + + case "${sbt_version}" in + 0.*) + vlog "SBT versions < 1.0 do not have published MD5 checksums, skipping check" + echo "" + ;; + *) verify_sbt_jar "${sbt_jar}" ;; + esac } } -usage () { +verify_sbt_jar() { + local jar="${1}" + local md5="${jar}.md5" + md5url="$(make_url "${sbt_version}").md5" + + echoerr "Downloading sbt launcher ${sbt_version} md5 hash:" + echoerr " From 
${md5url}" + echoerr " To ${md5}" + + download_url "${md5url}" "${md5}" >/dev/null 2>&1 + + if command -v md5sum >/dev/null 2>&1; then + if echo "$(cat "${md5}") ${jar}" | md5sum -c -; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + elif command -v md5 >/dev/null 2>&1; then + if [ "$(md5 -q "${jar}")" == "$(cat "${md5}")" ]; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + elif command -v openssl >/dev/null 2>&1; then + if [ "$(openssl md5 -r "${jar}" | awk '{print $1}')" == "$(cat "${md5}")" ]; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + else + echoerr "Could not find an MD5 command" + return 1 + fi +} + +usage() { set_sbt_version cat < Run the specified file as a scala script # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version) - -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version - -sbt-version use the specified version of sbt (default: $sbt_release_version) - -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version - -sbt-jar use the specified jar as the sbt launcher - -sbt-launch-dir directory to hold sbt launchers (default: $sbt_launch_dir) - -sbt-launch-repo repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) + -sbt-version use the specified version of sbt (default: $sbt_release_version) + -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version + -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version + -sbt-jar use the specified jar as the sbt launcher + -sbt-launch-dir directory to hold sbt launchers (default: $sbt_launch_dir) + -sbt-launch-repo repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) # scala version (default: as chosen by sbt) - -28 use $latest_28 - -29 use $latest_29 - -210 use $latest_210 - -211 use $latest_211 - -212 use $latest_212 - -213 use $latest_213 - -scala-home use the scala build at the specified directory - -scala-version use the specified version of scala - -binary-version use the specified scala version when searching for dependencies + -28 use $latest_28 + -29 use $latest_29 + -210 use $latest_210 + -211 use $latest_211 + -212 use $latest_212 + -213 use $latest_213 + -scala-home use the scala build at the specified directory + -scala-version use the specified version of scala + -binary-version use the specified scala version when searching for dependencies # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) - -java-home alternate JAVA_HOME + -java-home alternate JAVA_HOME # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found - $(default_jvm_opts) - JVM_OPTS environment variable holding either the jvm args directly, or - the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts') - Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. - -jvm-opts file containing jvm args (if not given, .jvmopts in project root is used if present) - -Dkey=val pass -Dkey=val directly to the jvm - -J-X pass option -X directly to the jvm (-J is stripped) + $(default_jvm_opts) + JVM_OPTS environment variable holding either the jvm args directly, or + the reference to a file containing jvm args if given path is prepended by '@' (e.g. 
'@/etc/jvmopts') + Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. + -jvm-opts file containing jvm args (if not given, .jvmopts in project root is used if present) + -Dkey=val pass -Dkey=val directly to the jvm + -J-X pass option -X directly to the jvm (-J is stripped) # passing options to sbt, OR to this runner - SBT_OPTS environment variable holding either the sbt args directly, or - the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') - Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. - -sbt-opts file containing sbt args (if not given, .sbtopts in project root is used if present) - -S-X add -X to sbt's scalacOptions (-S is stripped) + SBT_OPTS environment variable holding either the sbt args directly, or + the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') + Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. + -sbt-opts file containing sbt args (if not given, .sbtopts in project root is used if present) + -S-X add -X to sbt's scalacOptions (-S is stripped) + + # passing options exclusively to this runner + SBTX_OPTS environment variable holding either the sbt-extras args directly, or + the reference to a file containing sbt-extras args if given path is prepended by '@' (e.g. '@/etc/sbtxopts') + Note: "@"-file is overridden by local '.sbtxopts' or '-sbtx-opts' argument. + -sbtx-opts file containing sbt-extras args (if not given, .sbtxopts in project root is used if present) EOM + exit 0 } -process_args () { - require_arg () { +process_args() { + require_arg() { local type="$1" local opt="$2" local arg="$3" @@ -368,50 +469,56 @@ process_args () { } while [[ $# -gt 0 ]]; do case "$1" in - -h|-help) usage; exit 0 ;; - -v) verbose=true && shift ;; - -d) addSbt "--debug" && shift ;; - -w) addSbt "--warn" && shift ;; - -q) addSbt "--error" && shift ;; - -x) debugUs=true && shift ;; - -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; - -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; - -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; - -no-share) noshare=true && shift ;; - -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; - -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; - -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; - -offline) addSbt "set offline in Global := true" && shift ;; - -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; - -batch) batch=true && shift ;; - -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; - -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; - - -sbt-create) sbt_create=true && shift ;; - -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; + -h | -help) usage ;; + -v) verbose=true && shift ;; + -d) addSbt "--debug" && shift ;; + -w) addSbt "--warn" && shift ;; + -q) addSbt "--error" && shift ;; + -x) shift ;; # currently unused + -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; + -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; + + -no-colors) addJava "-Dsbt.log.noformat=true" && addJava "-Dsbt.color=false" && shift ;; + -sbt-create) sbt_create=true && shift ;; + -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; + -sbt-boot) require_arg path "$1" "$2" && 
addJava "-Dsbt.boot.directory=$2" && shift 2 ;; + -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; + -no-share) noshare=true && shift ;; + -offline) addSbt "set offline in Global := true" && shift ;; + -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; + -batch) batch=true && shift ;; + -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; + -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; + -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;; - -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; - -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; - -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; - -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; - -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; - -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; - -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; - -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; - -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; - -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; - - -D*) addJava "$1" && shift ;; - -J*) addJava "${1:2}" && shift ;; - -S*) addScalac "${1:2}" && shift ;; - -28) setScalaVersion "$latest_28" && shift ;; - -29) setScalaVersion "$latest_29" && shift ;; - -210) setScalaVersion "$latest_210" && shift ;; - -211) setScalaVersion "$latest_211" && shift ;; - -212) setScalaVersion "$latest_212" && shift ;; - -213) setScalaVersion "$latest_213" && shift ;; - new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} && addResidual "$1" && shift ;; - *) addResidual "$1" && shift ;; + -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; + -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; + -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; + -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; + -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; + + -28) setScalaVersion "$latest_28" && shift ;; + -29) setScalaVersion "$latest_29" && shift ;; + -210) setScalaVersion "$latest_210" && shift ;; + -211) setScalaVersion "$latest_211" && shift ;; + -212) setScalaVersion "$latest_212" && shift ;; + -213) setScalaVersion "$latest_213" && shift ;; + + -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; + -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; + -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; + -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; + -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; + -sbtx-opts) require_arg path "$1" "$2" && sbtx_opts_file="$2" && shift 2 ;; + -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; + + -D*) addJava "$1" && shift ;; + -J*) addJava "${1:2}" && shift ;; + -S*) addScalac "${1:2}" && shift ;; + + new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} && 
addResidual "$1" && shift ;; + + *) addResidual "$1" && shift ;; esac done } @@ -425,7 +532,7 @@ readConfigFile() { until $end; do read -r || end=true [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" - done < "$1" + done <"$1" } # if there are file/environment sbt_opts, process again so we @@ -435,7 +542,19 @@ if [[ -r "$sbt_opts_file" ]]; then while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then vlog "Using sbt options defined in variable \$SBT_OPTS" - IFS=" " read -r -a extra_sbt_opts <<< "$SBT_OPTS" + IFS=" " read -r -a extra_sbt_opts <<<"$SBT_OPTS" +else + vlog "No extra sbt options have been defined" +fi + +# if there are file/environment sbtx_opts, process again so we +# can supply args to this runner +if [[ -r "$sbtx_opts_file" ]]; then + vlog "Using sbt options defined in file $sbtx_opts_file" + while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbtx_opts_file") +elif [[ -n "$SBTX_OPTS" && ! ("$SBTX_OPTS" =~ ^@.*) ]]; then + vlog "Using sbt options defined in variable \$SBTX_OPTS" + IFS=" " read -r -a extra_sbt_opts <<<"$SBTX_OPTS" else vlog "No extra sbt options have been defined" fi @@ -454,8 +573,8 @@ checkJava # only exists in 0.12+ setTraceLevel() { case "$sbt_version" in - "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;; - *) setThisBuild traceLevel "$trace_level" ;; + "0.7."* | "0.10."* | "0.11."*) echoerr "Cannot set trace level in sbt version $sbt_version" ;; + *) setThisBuild traceLevel "$trace_level" ;; esac } @@ -466,12 +585,12 @@ setTraceLevel() { vlog "Detected sbt version $sbt_version" if [[ -n "$sbt_script" ]]; then - residual_args=( "$sbt_script" "${residual_args[@]}" ) + residual_args=("$sbt_script" "${residual_args[@]}") else # no args - alert them there's stuff in here - (( argumentCount > 0 )) || { + ((argumentCount > 0)) || { vlog "Starting $script_name: invoke with -help for other options" - residual_args=( shell ) + residual_args=(shell) } fi @@ -497,7 +616,7 @@ EOM # no jar? download it. [[ -r "$sbt_jar" ]] || acquire_sbt_jar || { # still no jar? uh-oh. - echo "Download failed. Obtain the jar manually and place it at $sbt_jar" + echo "Could not download and verify the launcher. Obtain the jar manually and place it at $sbt_jar" exit 1 } @@ -507,12 +626,12 @@ if [[ -n "$noshare" ]]; then done else case "$sbt_version" in - "0.7."* | "0.10."* | "0.11."* | "0.12."* ) + "0.7."* | "0.10."* | "0.11."* | "0.12."*) [[ -n "$sbt_dir" ]] || { sbt_dir="$HOME/.sbt/$sbt_version" vlog "Using $sbt_dir as sbt dir, -sbt-dir to override." } - ;; + ;; esac if [[ -n "$sbt_dir" ]]; then @@ -525,54 +644,18 @@ if [[ -r "$jvm_opts_file" ]]; then while read -r opt; do extra_jvm_opts+=("$opt"); done < <(readConfigFile "$jvm_opts_file") elif [[ -n "$JVM_OPTS" && ! 
("$JVM_OPTS" =~ ^@.*) ]]; then vlog "Using jvm options defined in \$JVM_OPTS variable" - IFS=" " read -r -a extra_jvm_opts <<< "$JVM_OPTS" + IFS=" " read -r -a extra_jvm_opts <<<"$JVM_OPTS" else vlog "Using default jvm options" - IFS=" " read -r -a extra_jvm_opts <<< "$(default_jvm_opts)" + IFS=" " read -r -a extra_jvm_opts <<<"$( default_jvm_opts)" fi # traceLevel is 0.12+ [[ -n "$trace_level" ]] && setTraceLevel -main () { - execRunner "$java_cmd" \ - "${extra_jvm_opts[@]}" \ - "${java_args[@]}" \ - -jar "$sbt_jar" \ - "${sbt_commands[@]}" \ - "${residual_args[@]}" -} - -# sbt inserts this string on certain lines when formatting is enabled: -# val OverwriteLine = "\r\u001BM\u001B[2K" -# ...in order not to spam the console with a million "Resolving" lines. -# Unfortunately that makes it that much harder to work with when -# we're not going to print those lines anyway. We strip that bit of -# line noise, but leave the other codes to preserve color. -mainFiltered () { - local -r excludeRegex=$(grep -E -v '^#|^$' ~/.sbtignore | paste -sd'|' -) - - echoLine () { - local -r line="$1" - local -r line1="${line//\r\x1BM\x1B\[2K//g}" # This strips the OverwriteLine code. - local -r line2="${line1//\x1B\[[0-9;]*[JKmsu]//g}" # This strips all codes - we test regexes against this. - - if [[ $line2 =~ $excludeRegex ]]; then - [[ -n $debugUs ]] && echo "[X] $line1" - else - [[ -n $debugUs ]] && echo " $line1" || echo "$line1" - fi - } - - echoLine "Starting sbt with output filtering enabled." - main | while read -r line; do echoLine "$line"; done -} - -# Only filter if there's a filter file and we don't see a known interactive command. -# Obviously this is super ad hoc but I don't know how to improve on it. Testing whether -# stdin is a terminal is useless because most of my use cases for this filtering are -# exactly when I'm at a terminal, running sbt non-interactively. -shouldFilter () { [[ -f ~/.sbtignore ]] && ! 
grep -E -q '\b(shell|console|consoleProject)\b' <<<"${residual_args[@]}"; } - -# run sbt -if shouldFilter; then mainFiltered; else main; fi +execRunner "$java_cmd" \ + "${extra_jvm_opts[@]}" \ + "${java_args[@]}" \ + -jar "$sbt_jar" \ + "${sbt_commands[@]}" \ + "${residual_args[@]}" diff --git a/thehive-backend/app/connectors/Connectors.scala b/thehive-backend/app/connectors/Connectors.scala index 2e975975a1..08f88523f7 100644 --- a/thehive-backend/app/connectors/Connectors.scala +++ b/thehive-backend/app/connectors/Connectors.scala @@ -2,20 +2,20 @@ package connectors import scala.collection.immutable -import play.api.libs.json.{ JsObject, Json } +import play.api.libs.json.{JsObject, Json} import play.api.mvc._ import play.api.routing.sird.UrlContext -import play.api.routing.{ Router, SimpleRouter } +import play.api.routing.{Router, SimpleRouter} import com.google.inject.AbstractModule -import javax.inject.{ Inject, Singleton } +import javax.inject.{Inject, Singleton} import models.HealthStatus -import net.codingwell.scalaguice.{ ScalaModule, ScalaMultibinder } +import net.codingwell.scalaguice.{ScalaModule, ScalaMultibinder} trait Connector { val name: String val router: Router - def status: JsObject = Json.obj("enabled" → true) + def status: JsObject = Json.obj("enabled" -> true) def health: HealthStatus.Type = HealthStatus.Ok } @@ -24,10 +24,10 @@ class ConnectorRouter @Inject()(connectors: immutable.Set[Connector], actionBuil def get(connectorName: String): Option[Connector] = connectors.find(_.name == connectorName) def routes: PartialFunction[RequestHeader, Handler] = { - case request @ p"/$connector/$path<.*>" ⇒ + case request @ p"/$connector/$path<.*>" => get(connector) .flatMap(_.router.withPrefix(s"/$connector/").handlerFor(request)) - .getOrElse(actionBuilder { _ ⇒ + .getOrElse(actionBuilder { _ => Results.NotFound(s"connector $connector not found") }) } diff --git a/thehive-backend/app/controllers/AlertCtrl.scala b/thehive-backend/app/controllers/AlertCtrl.scala index 8f6e22f56d..21ed5846b5 100644 --- a/thehive-backend/app/controllers/AlertCtrl.scala +++ b/thehive-backend/app/controllers/AlertCtrl.scala @@ -35,8 +35,8 @@ class AlertCtrl @Inject()( private[AlertCtrl] lazy val logger = Logger(getClass) @Timed - def create(): Action[Fields] = authenticated(Roles.alert).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def create(): Action[Fields] = authenticated(Roles.alert).async(fieldsBodyParser) { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .create( request @@ -46,61 +46,61 @@ class AlertCtrl @Inject()( .unset("status") .unset("follow") ) - .map(alert ⇒ renderer.toOutput(CREATED, alert)) + .map(alert => renderer.toOutput(CREATED, alert)) } } @Timed - def mergeWithCase(alertId: String, caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withCustom("longTask") { implicit ec ⇒ + def mergeWithCase(alertId: String, caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + executionContextSrv.withCustom("longTask") { implicit ec => for { - alert ← alertSrv.get(alertId) - caze ← caseSrv.get(caseId) - updatedCaze ← alertSrv.mergeWithCase(alert, caze) + alert <- alertSrv.get(alertId) + caze <- caseSrv.get(caseId) + updatedCaze <- alertSrv.mergeWithCase(alert, caze) } yield renderer.toOutput(CREATED, updatedCaze) } } @Timed - def bulkMergeWithCase: Action[Fields] = 
authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withCustom("longTask") { implicit ec ⇒ + def bulkMergeWithCase: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + executionContextSrv.withCustom("longTask") { implicit ec => val caseId = request.body.getString("caseId").getOrElse(throw BadRequestError("Parameter \"caseId\" is missing")) val alertIds = request.body.getStrings("alertIds").getOrElse(throw BadRequestError("Parameter \"alertIds\" is missing")) for { - alerts ← Future.traverse(alertIds)(alertSrv.get) - caze ← caseSrv.get(caseId) - updatedCaze ← alertSrv.bulkMergeWithCase(alerts, caze) + alerts <- Future.traverse(alertIds)(alertSrv.get) + caze <- caseSrv.get(caseId) + updatedCaze <- alertSrv.bulkMergeWithCase(alerts, caze) } yield renderer.toOutput(CREATED, updatedCaze) } } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => + executionContextSrv.withDefault { implicit ec => val withStats = request .queryString .get("nstats") .flatMap(_.headOption) - .exists(v ⇒ Try(v.toBoolean).getOrElse(v == "1")) + .exists(v => Try(v.toBoolean).getOrElse(v == "1")) val withSimilarity = request .queryString .get("similarity") .flatMap(_.headOption) - .exists(v ⇒ Try(v.toBoolean).getOrElse(v == "1")) + .exists(v => Try(v.toBoolean).getOrElse(v == "1")) for { - alert ← alertSrv.get(id) - alertsWithStats ← auxSrv.apply(alert, 0, withStats, removeUnaudited = false) - similarCases ← if (withSimilarity) + alert <- alertSrv.get(id) + alertsWithStats <- auxSrv.apply(alert, 0, withStats, removeUnaudited = false) + similarCases <- if (withSimilarity) alertSrv .similarCases(alert) - .map(sc ⇒ Json.obj("similarCases" → Json.toJson(sc))) + .map(sc => Json.obj("similarCases" -> Json.toJson(sc))) else Future.successful(JsObject.empty) - similarArtifacts ← if (withSimilarity) + similarArtifacts <- if (withSimilarity) alertSrv .alertArtifactsWithSeen(alert) - .map(aws ⇒ Json.obj("artifacts" → aws)) + .map(aws => Json.obj("artifacts" -> aws)) else Future.successful(JsObject.empty) } yield { renderer.toOutput(OK, alertsWithStats ++ similarCases ++ similarArtifacts) @@ -109,48 +109,48 @@ class AlertCtrl @Inject()( } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .update(id, request.body) - .map { alert ⇒ + .map { alert => renderer.toOutput(OK, alert) } } } @Timed - def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withCustom("longTask") { implicit ec ⇒ - request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids ⇒ - alertSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult ⇒ renderer.toMultiOutput(OK, multiResult)) + def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + executionContextSrv.withCustom("longTask") { implicit ec => + request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids => + alertSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult => 
renderer.toMultiOutput(OK, multiResult)) } } } @Timed - def delete(id: String, force: Option[Boolean]): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def delete(id: String, force: Option[Boolean]): Action[AnyContent] = authenticated(Roles.write).async { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .delete(id, force.getOrElse(false)) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } } @Timed - def bulkDelete(): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withCustom("longTask") { implicit ec ⇒ - request.body.getStrings("ids").fold(Future.successful(NoContent)) { ids ⇒ + def bulkDelete(): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => + executionContextSrv.withCustom("longTask") { implicit ec => + request.body.getStrings("ids").fold(Future.successful(NoContent)) { ids => Future .traverse(ids)(alertSrv.delete(_, request.body.getBoolean("force").getOrElse(false))) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } } } @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => + executionContextSrv.withDefault { implicit ec => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -164,8 +164,8 @@ class AlertCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => + executionContextSrv.withDefault { implicit ec => val query = request .body .getValue("query") @@ -175,73 +175,73 @@ class AlertCtrl @Inject()( .getValue("stats") .getOrElse(throw BadRequestError("Parameter \"stats\" is missing")) .as[Seq[Agg]] - alertSrv.stats(query, aggs).map(s ⇒ Ok(s)) + alertSrv.stats(query, aggs).map(s => Ok(s)) } } @Timed - def markAsRead(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def markAsRead(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => + executionContextSrv.withDefault { implicit ec => for { - alert ← alertSrv.get(id) - updatedAlert ← alertSrv.markAsRead(alert) + alert <- alertSrv.get(id) + updatedAlert <- alertSrv.markAsRead(alert) } yield renderer.toOutput(OK, updatedAlert) } } @Timed - def markAsUnread(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def markAsUnread(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => + executionContextSrv.withDefault { implicit ec => for { - alert ← alertSrv.get(id) - updatedAlert ← alertSrv.markAsUnread(alert) + alert <- alertSrv.get(id) + updatedAlert <- alertSrv.markAsUnread(alert) } yield renderer.toOutput(OK, updatedAlert) } } @Timed - def createCase(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def createCase(id: String): 
Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + executionContextSrv.withDefault { implicit ec => for { - alert ← alertSrv.get(id) + alert <- alertSrv.get(id) customCaseTemplate = request .body .getString("caseTemplate") .orElse(alert.caseTemplate()) - caze ← alertSrv.createCase(alert, customCaseTemplate) + caze <- alertSrv.createCase(alert, customCaseTemplate) } yield renderer.toOutput(CREATED, caze) } } @Timed - def followAlert(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def followAlert(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .setFollowAlert(id, follow = true) - .map { alert ⇒ + .map { alert => renderer.toOutput(OK, alert) } } } @Timed - def unfollowAlert(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def unfollowAlert(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .setFollowAlert(id, follow = false) - .map { alert ⇒ + .map { alert => renderer.toOutput(OK, alert) } } } @Timed - def fixStatus(): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def fixStatus(): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => + executionContextSrv.withDefault { implicit ec => alertSrv .fixStatus() - .map(_ ⇒ NoContent) + .map(_ => NoContent) } } diff --git a/thehive-backend/app/controllers/ArtifactCtrl.scala b/thehive-backend/app/controllers/ArtifactCtrl.scala index 646c49c1e0..1c97a5fc2e 100644 --- a/thehive-backend/app/controllers/ArtifactCtrl.scala +++ b/thehive-backend/app/controllers/ArtifactCtrl.scala @@ -85,7 +85,7 @@ class ArtifactCtrl @Inject()( files .filterNot(_.isDirectory) .flatMap(extractAndCheckSize(zipFile, _)) - .map { fiv ⇒ + .map { fiv => fields .unset("isZip") .unset("zipPassword") @@ -96,22 +96,22 @@ class ArtifactCtrl @Inject()( def getFieldsFromAttachment(fields: Fields, attachment: JsValue): Future[Seq[Fields]] = { val artifactFields = for { - attachmentId ← (attachment \ "id").asOpt[String] - name ← (attachment \ "name").asOpt[String] - contentType ← (attachment \ "contentType").asOpt[String] + attachmentId <- (attachment \ "id").asOpt[String] + name <- (attachment \ "name").asOpt[String] + contentType <- (attachment \ "contentType").asOpt[String] } yield { for { - hashes ← attachmentSrv.getHashes(attachmentId) - size ← attachmentSrv.getSize(attachmentId).recover { - case _: NoSuchElementException ⇒ 0 // workaround until elastic4play#93 is fixed + hashes <- attachmentSrv.getHashes(attachmentId) + size <- attachmentSrv.getSize(attachmentId).recover { + case _: NoSuchElementException => 0 // workaround until elastic4play#93 is fixed } } yield fields.set("attachment", AttachmentInputValue(name, hashes, size.toLong, contentType, attachmentId)) } - artifactFields.fold[Future[Seq[Fields]]](Future.successful(Nil))(_.map(f ⇒ Seq(f))) + artifactFields.fold[Future[Seq[Fields]]](Future.successful(Nil))(_.map(f => Seq(f))) } @Timed - def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit 
request => val fields = request.body val data = fields .getStrings("data") @@ -123,66 +123,66 @@ class ArtifactCtrl @Inject()( fields .get("attachment") .map { - case FileInputValue(_, filepath, _) if fields.getBoolean("isZip").getOrElse(false) ⇒ + case FileInputValue(_, filepath, _) if fields.getBoolean("isZip").getOrElse(false) => Future.successful(getFieldsFromZipFile(fields, filepath)) - case _: FileInputValue ⇒ Future.successful(Seq(fields)) - case JsonInputValue(JsArray(attachments)) ⇒ - Future.traverse(attachments)(attachment ⇒ getFieldsFromAttachment(fields, attachment)).map(_.flatten) - case JsonInputValue(attachment) ⇒ getFieldsFromAttachment(fields, attachment) - case other ⇒ Future.failed(InvalidFormatAttributeError("attachment", "attachment/file", other)) + case _: FileInputValue => Future.successful(Seq(fields)) + case JsonInputValue(JsArray(attachments)) => + Future.traverse(attachments)(attachment => getFieldsFromAttachment(fields, attachment)).map(_.flatten) + case JsonInputValue(attachment) => getFieldsFromAttachment(fields, attachment) + case other => Future.failed(InvalidFormatAttributeError("attachment", "attachment/file", other)) } - .map { fields ⇒ + .map { fields => fields - .flatMap(f ⇒ artifactSrv.create(caseId, f)) - .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult)) + .flatMap(f => artifactSrv.create(caseId, f)) + .map(multiResult => renderer.toMultiOutput(CREATED, multiResult)) } .getOrElse { artifactSrv .create(caseId, fields.unset("isZip").unset("zipPassword")) - .map(artifact ⇒ renderer.toOutput(CREATED, artifact)) + .map(artifact => renderer.toOutput(CREATED, artifact)) } } else if (data.length == 1) { artifactSrv .create(caseId, fields.set("data", data.head).unset("isZip").unset("zipPassword")) - .map(artifact ⇒ renderer.toOutput(CREATED, artifact)) + .map(artifact => renderer.toOutput(CREATED, artifact)) } else { val multiFields = data.map(fields.set("data", _).unset("isZip").unset("zipPassword")) artifactSrv .create(caseId, multiFields) - .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult)) + .map(multiResult => renderer.toMultiOutput(CREATED, multiResult)) } } @Timed - def get(id: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { _ ⇒ + def get(id: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { _ => artifactSrv .get(id) - .map(artifact ⇒ renderer.toOutput(OK, artifact)) + .map(artifact => renderer.toOutput(OK, artifact)) } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => artifactSrv .update(id, request.body.unset("attachment")) - .map(artifact ⇒ renderer.toOutput(OK, artifact)) + .map(artifact => renderer.toOutput(OK, artifact)) } @Timed - def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids ⇒ - artifactSrv.bulkUpdate(ids, request.body.unset("ids").unset("attachment")).map(multiResult ⇒ renderer.toMultiOutput(OK, multiResult)) + def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids => + artifactSrv.bulkUpdate(ids, request.body.unset("ids").unset("attachment")).map(multiResult => renderer.toMultiOutput(OK, multiResult)) } } 
@Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => artifactSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def findInCase(caseId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findInCase(caseId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val childQuery = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val query = and(childQuery, withParent("case", caseId)) @@ -194,7 +194,7 @@ class ArtifactCtrl @Inject()( } @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -207,8 +207,8 @@ class ArtifactCtrl @Inject()( } @Timed - def findSimilar(artifactId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ - artifactSrv.get(artifactId).flatMap { artifact ⇒ + def findSimilar(artifactId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => + artifactSrv.get(artifactId).flatMap { artifact => val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -219,9 +219,9 @@ class ArtifactCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - artifactSrv.stats(query, aggs).map(s ⇒ Ok(s)) + artifactSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/thehive-backend/app/controllers/AttachmentCtrl.scala b/thehive-backend/app/controllers/AttachmentCtrl.scala index f76ef41eee..10462bc65c 100644 --- a/thehive-backend/app/controllers/AttachmentCtrl.scala +++ b/thehive-backend/app/controllers/AttachmentCtrl.scala @@ -3,20 +3,20 @@ package controllers import java.nio.file.Files import akka.stream.scaladsl.FileIO -import javax.inject.{ Inject, Singleton } +import javax.inject.{Inject, Singleton} import models.Roles import net.lingala.zip4j.ZipFile import net.lingala.zip4j.model.ZipParameters -import net.lingala.zip4j.model.enums.{ CompressionLevel, EncryptionMethod } +import net.lingala.zip4j.model.enums.{CompressionLevel, EncryptionMethod} import org.elastic4play.Timed import org.elastic4play.controllers.Authenticated import org.elastic4play.models.AttachmentAttributeFormat -import org.elastic4play.services.{ AttachmentSrv, ExecutionContextSrv } +import org.elastic4play.services.{AttachmentSrv, ExecutionContextSrv} import play.api.http.HttpEntity import play.api.libs.Files.DefaultTemporaryFileCreator import play.api.mvc._ -import play.api.{ Configuration, mvc } +import play.api.{mvc, Configuration} /** * Controller used to access stored attachments (plain or zipped) @@ -54,8 +54,8 @@ class 
AttachmentCtrl( * open the document directly. It must be used only for safe file */ @Timed("controllers.AttachmentCtrl.download") - def download(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def download(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ => + executionContextSrv.withDefault { implicit ec => if (hash.startsWith("{{")) // angularjs hack NoContent else if (!name.getOrElse("").intersect(AttachmentAttributeFormat.forbiddenChar).isEmpty) @@ -64,7 +64,7 @@ class AttachmentCtrl( Result( header = ResponseHeader( 200, - Map("Content-Disposition" → s"""attachment; filename="${name.getOrElse(hash)}"""", "Content-Transfer-Encoding" → "binary") + Map("Content-Disposition" -> s"""attachment; filename="${name.getOrElse(hash)}"""", "Content-Transfer-Encoding" -> "binary") ), body = HttpEntity.Streamed(attachmentSrv.source(hash), None, None) ) @@ -77,32 +77,32 @@ class AttachmentCtrl( * File name can be specified (zip extension is append) */ @Timed("controllers.AttachmentCtrl.downloadZip") - def downloadZip(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ - executionContextSrv.withDefault { implicit ec ⇒ + def downloadZip(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ => + executionContextSrv.withDefault { implicit ec => if (!name.getOrElse("").intersect(AttachmentAttributeFormat.forbiddenChar).isEmpty) BadRequest("File name is invalid") else { val f = tempFileCreator.create("zip", hash).path Files.delete(f) - val zipFile = new ZipFile(f.toFile) + val zipFile = new ZipFile(f.toFile) zipFile.setPassword(password.toCharArray) - val zipParams = new ZipParameters - zipParams.setCompressionLevel(CompressionLevel.FASTEST) + val zipParams = new ZipParameters + zipParams.setCompressionLevel(CompressionLevel.FASTEST) zipParams.setEncryptFiles(true) zipParams.setEncryptionMethod(EncryptionMethod.ZIP_STANDARD) - // zipParams.setsetPassword(password.toCharArray) + // zipParams.setsetPassword(password.toCharArray) zipParams.setFileNameInZip(name.getOrElse(hash)) - // zipParams.setSourceExternalStream(true) + // zipParams.setSourceExternalStream(true) zipFile.addStream(attachmentSrv.stream(hash), zipParams) Result( header = ResponseHeader( 200, Map( - "Content-Disposition" → s"""attachment; filename="${name.getOrElse(hash)}.zip"""", - "Content-Type" → "application/zip", - "Content-Transfer-Encoding" → "binary", - "Content-Length" → Files.size(f).toString + "Content-Disposition" -> s"""attachment; filename="${name.getOrElse(hash)}.zip"""", + "Content-Type" -> "application/zip", + "Content-Transfer-Encoding" -> "binary", + "Content-Length" -> Files.size(f).toString ) ), body = HttpEntity.Streamed(FileIO.fromPath(f), Some(Files.size(f)), Some("application/zip")) diff --git a/thehive-backend/app/controllers/AuditCtrl.scala b/thehive-backend/app/controllers/AuditCtrl.scala index 49002b4803..a3b3428b84 100644 --- a/thehive-backend/app/controllers/AuditCtrl.scala +++ b/thehive-backend/app/controllers/AuditCtrl.scala @@ -30,13 +30,13 @@ class AuditCtrl @Inject()( * Return audit logs. 
For each item, include ancestor entities */ @Timed - def flow(rootId: Option[String], count: Option[Int]): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def flow(rootId: Option[String], count: Option[Int]): Action[AnyContent] = authenticated(Roles.read).async { _ => val (audits, total) = auditSrv(rootId.filterNot(_ == "any"), count.getOrElse(10)) renderer.toOutput(OK, audits, total) } @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -49,7 +49,7 @@ class AuditCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request .body .getValue("query") @@ -59,7 +59,7 @@ class AuditCtrl @Inject()( .getValue("stats") .getOrElse(throw BadRequestError("Parameter \"stats\" is missing")) .as[Seq[Agg]] - auditSrv.stats(query, aggs).map(s ⇒ Ok(s)) + auditSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/thehive-backend/app/controllers/AuthenticationCtrl.scala b/thehive-backend/app/controllers/AuthenticationCtrl.scala index 42a127388b..e1dc39375c 100644 --- a/thehive-backend/app/controllers/AuthenticationCtrl.scala +++ b/thehive-backend/app/controllers/AuthenticationCtrl.scala @@ -25,13 +25,13 @@ class AuthenticationCtrl @Inject()( ) extends AbstractController(components) { @Timed - def login: Action[Fields] = Action.async(fieldsBodyParser) { implicit request ⇒ + def login: Action[Fields] = Action.async(fieldsBodyParser) { implicit request => dbIndex.getIndexStatus.flatMap { - case false ⇒ Future.successful(Results.Status(520)) - case _ ⇒ + case false => Future.successful(Results.Status(520)) + case _ => for { - authContext ← authSrv.authenticate(request.body.getString("user").getOrElse("TODO"), request.body.getString("password").getOrElse("TODO")) - user ← userSrv.get(authContext.userId) + authContext <- authSrv.authenticate(request.body.getString("user").getOrElse("TODO"), request.body.getString("password").getOrElse("TODO")) + user <- userSrv.get(authContext.userId) } yield { if (user.status() == UserStatus.Ok) authenticated.setSessingUser(Ok, authContext) @@ -42,21 +42,21 @@ class AuthenticationCtrl @Inject()( } @Timed - def ssoLogin: Action[AnyContent] = Action.async { implicit request ⇒ + def ssoLogin: Action[AnyContent] = Action.async { implicit request => dbIndex.getIndexStatus.flatMap { - case false ⇒ Future.successful(Results.Status(520)) - case _ ⇒ + case false => Future.successful(Results.Status(520)) + case _ => authSrv .authenticate() .flatMap { - case Right(authContext) ⇒ - userSrv.get(authContext.userId).map { user ⇒ + case Right(authContext) => + userSrv.get(authContext.userId).map { user => if (user.status() == UserStatus.Ok) authenticated.setSessingUser(Redirect(configuration.get[String]("play.http.context").stripSuffix("/") + "/index.html"), authContext) else throw AuthorizationError("Your account is locked") } - case Left(result) ⇒ Future.successful(result) + case Left(result) => Future.successful(result) } } } diff --git a/thehive-backend/app/controllers/CaseCtrl.scala b/thehive-backend/app/controllers/CaseCtrl.scala index 
1160f20686..9ae822e2e7 100644 --- a/thehive-backend/app/controllers/CaseCtrl.scala +++ b/thehive-backend/app/controllers/CaseCtrl.scala @@ -40,73 +40,73 @@ class CaseCtrl @Inject()( private[CaseCtrl] lazy val logger = Logger(getClass) @Timed - def create(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def create(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => request .body .getString("template") - .map { templateName ⇒ + .map { templateName => caseTemplateSrv .getByName(templateName) .map(Some(_)) - .recover { case _ ⇒ None } + .recover { case _ => None } } .getOrElse(Future.successful(None)) - .flatMap { caseTemplate ⇒ + .flatMap { caseTemplate => caseSrv.create(request.body.unset("template"), caseTemplate) } - .map(caze ⇒ renderer.toOutput(CREATED, caze)) + .map(caze => renderer.toOutput(CREATED, caze)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => val withStats = for { - statsValues ← request.queryString.get("nstats") - firstValue ← statsValues.headOption + statsValues <- request.queryString.get("nstats") + firstValue <- statsValues.headOption } yield Try(firstValue.toBoolean).getOrElse(firstValue == "1") for { - caze ← caseSrv.get(id) - casesWithStats ← auxSrv.apply(caze, 0, withStats.getOrElse(false), removeUnaudited = false) + caze <- caseSrv.get(id) + casesWithStats <- auxSrv.apply(caze, 0, withStats.getOrElse(false), removeUnaudited = false) } yield renderer.toOutput(OK, casesWithStats) } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => val isCaseClosing = request.body.getString("status").contains(CaseStatus.Resolved.toString) for { // Closing the case, so lets close the open tasks - caze ← caseSrv.update(id, request.body) - _ ← if (isCaseClosing) taskSrv.closeTasksOfCase(id) else Future.successful(Nil) // FIXME log warning if closedTasks contains errors + caze <- caseSrv.update(id, request.body) + _ <- if (isCaseClosing) taskSrv.closeTasksOfCase(id) else Future.successful(Nil) // FIXME log warning if closedTasks contains errors } yield renderer.toOutput(OK, caze) } @Timed - def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def bulkUpdate(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => val isCaseClosing = request.body.getString("status").contains(CaseStatus.Resolved.toString) - request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids ⇒ + request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids => if (isCaseClosing) taskSrv.closeTasksOfCase(ids: _*) // FIXME log warning if closedTasks contains errors - caseSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult ⇒ renderer.toMultiOutput(OK, multiResult)) + caseSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult => renderer.toMultiOutput(OK, multiResult)) } } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => caseSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def 
realDelete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def realDelete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => caseSrv .realDelete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -119,35 +119,35 @@ class CaseCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - caseSrv.stats(query, aggs).map(s ⇒ Ok(s)) + caseSrv.stats(query, aggs).map(s => Ok(s)) } @Timed - def linkedCases(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def linkedCases(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => caseSrv .linkedCases(id) .runWith(Sink.seq) - .map { cases ⇒ + .map { cases => val casesList = cases .sortWith { - case ((c1, _), (c2, _)) ⇒ c1.startDate().after(c2.startDate()) + case ((c1, _), (c2, _)) => c1.startDate().after(c2.startDate()) } .map { - case (caze, artifacts) ⇒ + case (caze, artifacts) => Json.toJson(caze).as[JsObject] - "description" + - ("linkedWith" → Json.toJson(artifacts)) + - ("linksCount" → Json.toJson(artifacts.size)) + ("linkedWith" -> Json.toJson(artifacts)) + + ("linksCount" -> Json.toJson(artifacts.size)) } renderer.toOutput(OK, casesList) } } @Timed - def merge(caseId1: String, caseId2: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ - caseMergeSrv.merge(caseId1, caseId2).map { caze ⇒ + def merge(caseId1: String, caseId2: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => + caseMergeSrv.merge(caseId1, caseId2).map { caze => renderer.toOutput(OK, caze) } } diff --git a/thehive-backend/app/controllers/CaseTemplateCtrl.scala b/thehive-backend/app/controllers/CaseTemplateCtrl.scala index 091b4b45b3..ca52ec0830 100644 --- a/thehive-backend/app/controllers/CaseTemplateCtrl.scala +++ b/thehive-backend/app/controllers/CaseTemplateCtrl.scala @@ -29,35 +29,35 @@ class CaseTemplateCtrl @Inject()( with Status { @Timed - def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => caseTemplateSrv .create(request.body) - .map(caze ⇒ renderer.toOutput(CREATED, caze)) + .map(caze => renderer.toOutput(CREATED, caze)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => caseTemplateSrv .get(id) - .map(caze ⇒ renderer.toOutput(OK, caze)) + .map(caze => renderer.toOutput(OK, caze)) } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = 
authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => caseTemplateSrv .update(id, request.body) - .map(caze ⇒ renderer.toOutput(OK, caze)) + .map(caze => renderer.toOutput(OK, caze)) } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => caseTemplateSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) diff --git a/thehive-backend/app/controllers/CustomFieldsCtrl.scala b/thehive-backend/app/controllers/CustomFieldsCtrl.scala index 4975e243a8..9ccc7b0339 100644 --- a/thehive-backend/app/controllers/CustomFieldsCtrl.scala +++ b/thehive-backend/app/controllers/CustomFieldsCtrl.scala @@ -34,21 +34,21 @@ class CustomFieldsCtrl @Inject()( .getItems[JsObject] ._1 .collect { - case (_, value) if (value \ "reference").asOpt[String].contains(customField) ⇒ (value \ "type").as[String] + case (_, value) if (value \ "reference").asOpt[String].contains(customField) => (value \ "type").as[String] } .runWith(Sink.head) - .recoverWith { case _ ⇒ Future.failed(NotFoundError(s"CustomField $customField not found")) } - .flatMap { customFieldType ⇒ + .recoverWith { case _ => Future.failed(NotFoundError(s"CustomField $customField not found")) } + .flatMap { customFieldType => val filter = and("relations" in ("case", "alert", "caseTemplate"), contains(s"customFields.$customField.$customFieldType")) dbfind( - indexName ⇒ search(indexName).query(filter.query).aggregations(termsAgg("t","relations")) - ).map { searchResponse ⇒ + indexName => search(indexName).query(filter.query).aggregations(termsAgg("t", "relations")) + ).map { searchResponse => val buckets = searchResponse.aggregations.result[Terms]("t").buckets val total = buckets.map(_.docCount).sum - val result = buckets.map(b ⇒ b.key → JsNumber(b.docCount)) :+ ("total" → JsNumber(total)) + val result = buckets.map(b => b.key -> JsNumber(b.docCount)) :+ ("total" -> JsNumber(total)) Ok(JsObject(result)) } - .recover { case _ ⇒ Ok(Json.obj("total" → 0)) } + .recover { case _ => Ok(Json.obj("total" -> 0)) } } } } diff --git a/thehive-backend/app/controllers/DBListCtrl.scala b/thehive-backend/app/controllers/DBListCtrl.scala index 74496673d6..1325d7ec19 100644 --- a/thehive-backend/app/controllers/DBListCtrl.scala +++ b/thehive-backend/app/controllers/DBListCtrl.scala @@ -23,56 +23,56 @@ class DBListCtrl @Inject()( ) extends AbstractController(components) { @Timed("controllers.DBListCtrl.list") - def list: Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - dblists.listAll.map { listNames ⇒ + def list: Action[AnyContent] = authenticated(Roles.read).async { _ => + dblists.listAll.map { listNames => renderer.toOutput(OK, listNames) } } @Timed("controllers.DBListCtrl.listItems") - def listItems(listName: String): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ + def listItems(listName: String): Action[AnyContent] = authenticated(Roles.read) { _ => val (src, _) = dblists(listName).getItems[JsValue] val items = src - .map { case (id, value) ⇒ s""""$id":$value""" } + .map { case (id, 
value) => s""""$id":$value""" } .intersperse("{", ",", "}") Ok.chunked(items).as("application/json") } @Timed("controllers.DBListCtrl.addItem") - def addItem(listName: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ - request.body.getValue("value").fold(Future.successful(NoContent)) { value ⇒ - dblists(listName).addItem(value).map { item ⇒ + def addItem(listName: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => + request.body.getValue("value").fold(Future.successful(NoContent)) { value => + dblists(listName).addItem(value).map { item => renderer.toOutput(OK, item.id) } } } @Timed("controllers.DBListCtrl.deleteItem") - def deleteItem(itemId: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ - dblists.deleteItem(itemId).map { _ ⇒ + def deleteItem(itemId: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => + dblists.deleteItem(itemId).map { _ => NoContent } } @Timed("controllers.DBListCtrl.udpateItem") - def updateItem(itemId: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def updateItem(itemId: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => request .body .getValue("value") - .map { value ⇒ + .map { value => for { - item ← dblists.getItem(itemId) - _ ← dblists.deleteItem(item) - newItem ← dblists(item.dblist).addItem(value) + item <- dblists.getItem(itemId) + _ <- dblists.deleteItem(item) + newItem <- dblists(item.dblist).addItem(value) } yield renderer.toOutput(OK, newItem.id) } .getOrElse(Future.failed(MissingAttributeError("value"))) } @Timed("controllers.DBListCtrl.itemExists") - def itemExists(listName: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def itemExists(listName: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val itemKey = request.body.getString("key").getOrElse(throw MissingAttributeError("Parameter key is missing")) val itemValue = request.body.getValue("value").getOrElse(throw MissingAttributeError("Parameter value is missing")) - dblists(listName).exists(itemKey, itemValue).map(r ⇒ Ok(Json.obj("found" → r))) + dblists(listName).exists(itemKey, itemValue).map(r => Ok(Json.obj("found" -> r))) } } diff --git a/thehive-backend/app/controllers/DashboardCtrl.scala b/thehive-backend/app/controllers/DashboardCtrl.scala index b4311da67a..0e50675b41 100644 --- a/thehive-backend/app/controllers/DashboardCtrl.scala +++ b/thehive-backend/app/controllers/DashboardCtrl.scala @@ -33,24 +33,24 @@ class DashboardCtrl @Inject()( private[DashboardCtrl] lazy val logger = Logger(getClass) @Timed - def create(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def create(): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => dashboardSrv .create(request.body) - .map(dashboard ⇒ renderer.toOutput(CREATED, dashboard)) + .map(dashboard => renderer.toOutput(CREATED, dashboard)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - dashboardSrv.get(id).map { dashboard ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => + dashboardSrv.get(id).map { dashboard => renderer.toOutput(OK, dashboard) } } @Timed - def update(id: String): Action[Fields] = 
authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => for { - dashboard ← dashboardSrv.get(id) - updatedDashboard ← if (dashboard.createdBy == request.userId || request.roles.contains(Roles.admin)) + dashboard <- dashboardSrv.get(id) + updatedDashboard <- if (dashboard.createdBy == request.userId || request.roles.contains(Roles.admin)) dashboardSrv.update(dashboard, request.body) else Future.failed(AuthorizationError("You can't update this dashboard, you are not the owner")) @@ -58,10 +58,10 @@ class DashboardCtrl @Inject()( } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => for { - dashboard ← dashboardSrv.get(id) - _ ← if (dashboard.createdBy == request.userId || request.roles.contains(Roles.admin)) + dashboard <- dashboardSrv.get(id) + _ <- if (dashboard.createdBy == request.userId || request.roles.contains(Roles.admin)) dashboardSrv.delete(id) else Future.failed(AuthorizationError("You can't update this dashboard, you are not the owner")) @@ -69,7 +69,7 @@ class DashboardCtrl @Inject()( } @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val query = request.body.getValue("query").fold[QueryDef]("status" ~!= "Deleted")(_.as[QueryDef]) val range = request.body.getString("range") @@ -80,9 +80,9 @@ class DashboardCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - dashboardSrv.stats(query, aggs).map(s ⇒ Ok(s)) + dashboardSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/thehive-backend/app/controllers/DescribeCtrl.scala b/thehive-backend/app/controllers/DescribeCtrl.scala index 5e00dbc370..cde2b1bdea 100644 --- a/thehive-backend/app/controllers/DescribeCtrl.scala +++ b/thehive-backend/app/controllers/DescribeCtrl.scala @@ -26,16 +26,16 @@ class DescribeCtrl @Inject()( private def modelToJson(model: BaseModelDef): JsObject = { val attributeDefinitions = model.attributes.flatMap { - case attribute: Attribute[t] ⇒ attribute.format.definition(dblists, attribute) - } ++ model.computedMetrics.keys.map { computedMetricName ⇒ + case attribute: Attribute[t] => attribute.format.definition(dblists, attribute) + } ++ model.computedMetrics.keys.map { computedMetricName => AttributeDefinition(s"computed.$computedMetricName", "number", s"Computed metric $computedMetricName", Nil, Nil) } - Json.obj("label" → model.label, "path" → model.path, "attributes" → attributeDefinitions) + Json.obj("label" -> model.label, "path" -> model.path, "attributes" -> attributeDefinitions) } - def describe(modelName: String): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ + def describe(modelName: String): Action[AnyContent] = authenticated(Roles.read) { _ => modelSrv(modelName) - .map { model ⇒ + .map { model => renderer.toOutput(OK, 
modelToJson(model)) } .getOrElse(NotFound(s"Model $modelName not found")) @@ -43,11 +43,11 @@ class DescribeCtrl @Inject()( private val allModels: Seq[String] = Seq("case", "case_artifact", "case_task", "case_task_log", "alert", "case_artifact_job", "audit", "action") - def describeAll: Action[AnyContent] = authenticated(Roles.read) { _ ⇒ + def describeAll: Action[AnyContent] = authenticated(Roles.read) { _ => val entityDefinitions = modelSrv .list .collect { - case model if allModels.contains(model.modelName) ⇒ model.modelName → modelToJson(model) + case model if allModels.contains(model.modelName) => model.modelName -> modelToJson(model) } renderer.toOutput(OK, JsObject(entityDefinitions)) } diff --git a/thehive-backend/app/controllers/LogCtrl.scala b/thehive-backend/app/controllers/LogCtrl.scala index ff4879dfd4..6041defb6a 100644 --- a/thehive-backend/app/controllers/LogCtrl.scala +++ b/thehive-backend/app/controllers/LogCtrl.scala @@ -27,35 +27,35 @@ class LogCtrl @Inject()( with Status { @Timed - def create(taskId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def create(taskId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => logSrv .create(taskId, request.body) - .map(log ⇒ renderer.toOutput(CREATED, log)) + .map(log => renderer.toOutput(CREATED, log)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => logSrv .get(id) - .map(log ⇒ renderer.toOutput(OK, log)) + .map(log => renderer.toOutput(OK, log)) } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => logSrv .update(id, request.body) - .map(log ⇒ renderer.toOutput(OK, log)) + .map(log => renderer.toOutput(OK, log)) } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => logSrv .delete(id) - .map(_ ⇒ Ok("")) + .map(_ => Ok("")) } @Timed - def findInTask(taskId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findInTask(taskId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val childQuery = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val query = and(childQuery, parent("case_task", withId(taskId))) @@ -67,7 +67,7 @@ class LogCtrl @Inject()( } @Timed - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -77,9 +77,9 @@ class LogCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = 
request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - logSrv.stats(query, aggs).map(s ⇒ Ok(s)) + logSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/thehive-backend/app/controllers/SearchCtrl.scala b/thehive-backend/app/controllers/SearchCtrl.scala index 400b537258..c6fadd7359 100644 --- a/thehive-backend/app/controllers/SearchCtrl.scala +++ b/thehive-backend/app/controllers/SearchCtrl.scala @@ -29,7 +29,7 @@ class SearchCtrl @Inject()( with Status { @Timed - def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") @@ -48,7 +48,7 @@ class SearchCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val globalQuery = request.body.getValue("query").flatMap(_.asOpt[QueryDef]).toList Future @@ -58,14 +58,14 @@ class SearchCtrl @Inject()( .getValue("stats") .getOrElse(throw BadRequestError("Parameter \"stats\" is missing")) .as[Seq[JsObject]] - ) { statsJson ⇒ + ) { statsJson => val query = (statsJson \ "query").asOpt[QueryDef].toList val agg = (statsJson \ "stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] val modelName = (statsJson \ "model").getOrElse(throw BadRequestError("Parameter \"model\" is missing")).as[String] val model = modelSrv.apply(modelName).getOrElse(throw BadRequestError(s"Model $modelName doesn't exist")) findSrv.apply(model, and(globalQuery ::: query), agg: _*) } - .map { statsResults ⇒ + .map { statsResults => renderer.toOutput(OK, statsResults.reduceOption(_ deepMerge _).getOrElse(JsObject.empty)) } } diff --git a/thehive-backend/app/controllers/StatusCtrl.scala b/thehive-backend/app/controllers/StatusCtrl.scala index 63b24b1281..ce047146ca 100644 --- a/thehive-backend/app/controllers/StatusCtrl.scala +++ b/thehive-backend/app/controllers/StatusCtrl.scala @@ -45,26 +45,26 @@ class StatusCtrl @Inject()( def get: Action[AnyContent] = Action { Ok( Json.obj( - "versions" → Json.obj( - "TheHive" → getVersion(classOf[models.Case]), - "Elastic4Play" → getVersion(classOf[Timed]), - "Play" → getVersion(classOf[AbstractController]), - "Elastic4s" → getVersion(classOf[ElasticDsl]), - "ElasticSearch" → getVersion(classOf[Node]) + "versions" -> Json.obj( + "TheHive" -> getVersion(classOf[models.Case]), + "Elastic4Play" -> getVersion(classOf[Timed]), + "Play" -> getVersion(classOf[AbstractController]), + "Elastic4s" -> getVersion(classOf[ElasticDsl]), + "ElasticSearch" -> getVersion(classOf[Node]) ), - "connectors" → JsObject(connectors.map(c ⇒ c.name → c.status).toSeq), - "health" → Json.obj("elasticsearch" → clusterStatusName), - "config" → Json.obj( - "protectDownloadsWith" → configuration.get[String]("datastore.attachment.password"), - "authType" → (authSrv match { - case multiAuthSrv: MultiAuthSrv ⇒ - multiAuthSrv.authProviders.map { a ⇒ + "connectors" -> JsObject(connectors.map(c => c.name -> c.status).toSeq), + "health" -> Json.obj("elasticsearch" -> clusterStatusName), + "config" -> Json.obj( + "protectDownloadsWith" -> 
configuration.get[String]("datastore.attachment.password"), + "authType" -> (authSrv match { + case multiAuthSrv: MultiAuthSrv => + multiAuthSrv.authProviders.map { a => JsString(a.name) } - case _ ⇒ JsString(authSrv.name) + case _ => JsString(authSrv.name) }), - "capabilities" → authSrv.capabilities.map(c ⇒ JsString(c.toString)), - "ssoAutoLogin" → JsBoolean(configuration.getOptional[Boolean]("auth.sso.autologin").getOrElse(false)) + "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)), + "ssoAutoLogin" -> JsBoolean(configuration.getOptional[Boolean]("auth.sso.autologin").getOrElse(false)) ) ) ) @@ -73,13 +73,13 @@ class StatusCtrl @Inject()( @Timed("controllers.StatusCtrl.health") def health: Action[AnyContent] = Action.async { for { - dbStatusInt ← dbIndex.getClusterStatus + dbStatusInt <- dbIndex.getClusterStatus dbStatus = dbStatusInt match { - case 0 ⇒ HealthStatus.Ok - case 1 ⇒ HealthStatus.Warning - case _ ⇒ HealthStatus.Error + case 0 => HealthStatus.Ok + case 1 => HealthStatus.Warning + case _ => HealthStatus.Error } - distinctStatus = connectors.map(c ⇒ c.health) + dbStatus + distinctStatus = connectors.map(c => c.health) + dbStatus globalStatus = if (distinctStatus.contains(HealthStatus.Ok)) { if (distinctStatus.size > 1) HealthStatus.Warning else HealthStatus.Ok } else if (distinctStatus.contains(HealthStatus.Error)) HealthStatus.Error diff --git a/thehive-backend/app/controllers/StreamCtrl.scala b/thehive-backend/app/controllers/StreamCtrl.scala index 756236b735..d30044c053 100644 --- a/thehive-backend/app/controllers/StreamCtrl.scala +++ b/thehive-backend/app/controllers/StreamCtrl.scala @@ -3,7 +3,7 @@ package controllers import akka.actor.{ActorIdentity, ActorSystem, Identify, Props} import akka.cluster.pubsub.DistributedPubSub import akka.cluster.pubsub.DistributedPubSubMediator.{Put, Send} -import akka.pattern.{AskTimeoutException, ask} +import akka.pattern.{ask, AskTimeoutException} import akka.util.Timeout import javax.inject.{Inject, Singleton} import models.Roles @@ -83,44 +83,44 @@ class StreamCtrl( * This call waits up to "refresh", if there is no event, return empty response */ @Timed("controllers.StreamCtrl.get") - def get(id: String): Action[AnyContent] = Action.async { implicit request ⇒ + def get(id: String): Action[AnyContent] = Action.async { implicit request => implicit val timeout: Timeout = Timeout(refresh + 1.second) if (!isValidStreamId(id)) { Future.successful(BadRequest("Invalid stream id")) } else { val futureStatus = authenticated.expirationStatus(request) match { - case ExpirationError if !migrationSrv.isMigrating ⇒ - userSrv.getInitialUser(request).recoverWith { case _ ⇒ authenticated.getFromApiKey(request) }.map(_ ⇒ OK) - case _: ExpirationWarning ⇒ Future.successful(220) - case _ ⇒ Future.successful(OK) + case ExpirationError if !migrationSrv.isMigrating => + userSrv.getInitialUser(request).recoverWith { case _ => authenticated.getFromApiKey(request) }.map(_ => OK) + case _: ExpirationWarning => Future.successful(220) + case _ => Future.successful(OK) } // Check if stream actor exists mediator .ask(Send(s"/user/stream-$id", Identify(1), localAffinity = false))(Timeout(2.seconds)) .flatMap { - case ActorIdentity(1, Some(_)) ⇒ - futureStatus.flatMap { status ⇒ + case ActorIdentity(1, Some(_)) => + futureStatus.flatMap { status => (mediator ? 
Send(s"/user/stream-$id", StreamActor.GetOperations, localAffinity = false)) map { - case StreamMessages(operations) ⇒ renderer.toOutput(status, operations) - case m ⇒ InternalServerError(s"Unexpected message : $m (${m.getClass})") + case StreamMessages(operations) => renderer.toOutput(status, operations) + case m => InternalServerError(s"Unexpected message : $m (${m.getClass})") } } - case _ ⇒ Future.successful(renderer.toOutput(NOT_FOUND, Json.obj("type" → "StreamNotFound", "message" → s"Stream $id doesn't exist"))) + case _ => Future.successful(renderer.toOutput(NOT_FOUND, Json.obj("type" -> "StreamNotFound", "message" -> s"Stream $id doesn't exist"))) } .recover { - case _: AskTimeoutException ⇒ renderer.toOutput(NOT_FOUND, Json.obj("type" → "StreamNotFound", "message" → s"Stream $id doesn't exist")) + case _: AskTimeoutException => renderer.toOutput(NOT_FOUND, Json.obj("type" -> "StreamNotFound", "message" -> s"Stream $id doesn't exist")) } } } @Timed("controllers.StreamCtrl.status") - def status: Action[AnyContent] = Action { implicit request ⇒ + def status: Action[AnyContent] = Action { implicit request => val status = authenticated.expirationStatus(request) match { - case ExpirationWarning(duration) ⇒ Json.obj("remaining" → duration.toSeconds, "warning" → true) - case ExpirationError ⇒ Json.obj("remaining" → 0, "warning" → true) - case ExpirationOk(duration) ⇒ Json.obj("remaining" → duration.toSeconds, "warning" → false) + case ExpirationWarning(duration) => Json.obj("remaining" -> duration.toSeconds, "warning" -> true) + case ExpirationError => Json.obj("remaining" -> 0, "warning" -> true) + case ExpirationOk(duration) => Json.obj("remaining" -> duration.toSeconds, "warning" -> false) } Ok(status) } diff --git a/thehive-backend/app/controllers/TaskCtrl.scala b/thehive-backend/app/controllers/TaskCtrl.scala index 730548103e..e3d69647a6 100644 --- a/thehive-backend/app/controllers/TaskCtrl.scala +++ b/thehive-backend/app/controllers/TaskCtrl.scala @@ -29,35 +29,35 @@ class TaskCtrl @Inject()( with Status { @Timed - def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => taskSrv .create(caseId, request.body) - .map(task ⇒ renderer.toOutput(CREATED, task)) + .map(task => renderer.toOutput(CREATED, task)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => taskSrv .get(id) - .map(task ⇒ renderer.toOutput(OK, task)) + .map(task => renderer.toOutput(OK, task)) } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => taskSrv .update(id, request.body) - .map(task ⇒ renderer.toOutput(OK, task)) + .map(task => renderer.toOutput(OK, task)) } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => taskSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def findInCase(caseId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findInCase(caseId: String): Action[Fields] = 
authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val childQuery = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val query = and(childQuery, withParent("case", caseId)) @@ -69,7 +69,7 @@ class TaskCtrl @Inject()( } @Timed - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -82,9 +82,9 @@ class TaskCtrl @Inject()( } @Timed - def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - taskSrv.stats(query, aggs).map(s ⇒ Ok(s)) + taskSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/thehive-backend/app/controllers/UserCtrl.scala b/thehive-backend/app/controllers/UserCtrl.scala index 2965a74bcb..737e7d54fc 100644 --- a/thehive-backend/app/controllers/UserCtrl.scala +++ b/thehive-backend/app/controllers/UserCtrl.scala @@ -34,23 +34,23 @@ class UserCtrl @Inject()( private[UserCtrl] lazy val logger = Logger(getClass) @Timed - def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => userSrv .create(request.body) - .map(user ⇒ renderer.toOutput(CREATED, user)) + .map(user => renderer.toOutput(CREATED, user)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => userSrv .get(id) - .map { user ⇒ + .map { user => renderer.toOutput(OK, user) } } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => if (id == request.authContext.userId || request.authContext.roles.contains(Roles.admin)) { if (request.body.contains("password")) { Future.failed(AuthorizationError("You must use dedicated API (setPassword, changePassword) to update password")) @@ -61,7 +61,7 @@ class UserCtrl @Inject()( } else if (request.body.contains("status") && !request.authContext.roles.contains(Roles.admin)) { Future.failed(AuthorizationError("You are not permitted to change user status")) } else { - userSrv.update(id, request.body.unset("password").unset("key")).map { user ⇒ + userSrv.update(id, request.body.unset("password").unset("key")).map { user => renderer.toOutput(OK, user) } } @@ -71,24 +71,24 @@ class UserCtrl @Inject()( } @Timed - def setPassword(login: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def setPassword(login: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => request .body .getString("password") - .fold(Future.failed[Result](MissingAttributeError("password"))) { password ⇒ - 
authSrv.setPassword(login, password).map(_ ⇒ NoContent) + .fold(Future.failed[Result](MissingAttributeError("password"))) { password => + authSrv.setPassword(login, password).map(_ => NoContent) } } @Timed - def changePassword(login: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def changePassword(login: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => if (login == request.authContext.userId) { val fields = request.body - fields.getString("password").fold(Future.failed[Result](MissingAttributeError("password"))) { password ⇒ - fields.getString("currentPassword").fold(Future.failed[Result](MissingAttributeError("currentPassword"))) { currentPassword ⇒ + fields.getString("password").fold(Future.failed[Result](MissingAttributeError("password"))) { password => + fields.getString("currentPassword").fold(Future.failed[Result](MissingAttributeError("currentPassword"))) { currentPassword => authSrv .changePassword(request.authContext.userId, currentPassword, password) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } } } else @@ -96,28 +96,28 @@ class UserCtrl @Inject()( } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => userSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def currentUser: Action[AnyContent] = Action.async { implicit request ⇒ + def currentUser: Action[AnyContent] = Action.async { implicit request => for { - authContext ← authenticated.getContext(request) - user ← userSrv.get(authContext.userId) + authContext <- authenticated.getContext(request) + user <- userSrv.get(authContext.userId) preferences = Try(Json.parse(user.preferences())).recover { - case _ ⇒ + case _ => logger.warn(s"User ${authContext.userId} has invalid preference format: ${user.preferences()}") JsObject.empty }.get - json = user.toJson + ("preferences" → preferences) + json = user.toJson + ("preferences" -> preferences) } yield renderer.toOutput(OK, json) } @Timed - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -126,17 +126,17 @@ class UserCtrl @Inject()( } @Timed - def getKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def getKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => authSrv.getKey(id).map(Ok(_)) } @Timed - def removeKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ - authSrv.removeKey(id).map(_ ⇒ Ok) + def removeKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => + authSrv.removeKey(id).map(_ => Ok) } @Timed - def renewKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def renewKey(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => authSrv.renewKey(id).map(Ok(_)) } } diff --git a/thehive-backend/app/global/Filters.scala b/thehive-backend/app/global/Filters.scala index 18c7d8a52a..c494a9e901 100644 --- 
a/thehive-backend/app/global/Filters.scala +++ b/thehive-backend/app/global/Filters.scala @@ -5,7 +5,7 @@ import play.api.http.SessionConfiguration import play.api.libs.crypto.CSRFTokenSigner import play.api.mvc.RequestHeader import play.filters.csrf.CSRF.{ErrorHandler, TokenProvider} -import play.filters.csrf.{CSRFConfig, CSRFFilter ⇒ PlayCSRFFilter} +import play.filters.csrf.{CSRFConfig, CSRFFilter => PlayCSRFFilter} import akka.stream.Materializer import javax.inject.{Inject, Provider, Singleton} diff --git a/thehive-backend/app/global/TheHive.scala b/thehive-backend/app/global/TheHive.scala index 276aa661eb..533c491a32 100644 --- a/thehive-backend/app/global/TheHive.scala +++ b/thehive-backend/app/global/TheHive.scala @@ -21,7 +21,7 @@ import services.mappers.{MultiUserMapperSrv, UserMapper} import org.elastic4play.models.BaseModelDef import org.elastic4play.services.auth.MultiAuthSrv -import org.elastic4play.services.{AuthSrv, MigrationOperations, UserSrv ⇒ EUserSrv} +import org.elastic4play.services.{AuthSrv, MigrationOperations, UserSrv => EUserSrv} class TheHive(environment: Environment, val configuration: Configuration) extends AbstractModule with ScalaModule with AkkaGuiceSupport { private[TheHive] lazy val logger = Logger(s"module") @@ -50,8 +50,8 @@ class TheHive(environment: Environment, val configuration: Configuration) extend reflectionClasses .getSubTypesOf(classOf[BaseModelDef]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers)) - .foreach { modelClass ⇒ + .filterNot(c => Modifier.isAbstract(c.getModifiers)) + .foreach { modelClass => logger.info(s"Loading model $modelClass") modelBindings.addBinding.to(modelClass) if (classOf[AuditedModel].isAssignableFrom(modelClass)) { @@ -62,18 +62,18 @@ class TheHive(environment: Environment, val configuration: Configuration) extend reflectionClasses .getSubTypesOf(classOf[AuthSrv]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers) || c.isMemberClass) - .filterNot(c ⇒ c == classOf[MultiAuthSrv] || c == classOf[TheHiveAuthSrv]) - .foreach { authSrvClass ⇒ + .filterNot(c => Modifier.isAbstract(c.getModifiers) || c.isMemberClass) + .filterNot(c => c == classOf[MultiAuthSrv] || c == classOf[TheHiveAuthSrv]) + .foreach { authSrvClass => authBindings.addBinding.to(authSrvClass) } reflectionClasses .getSubTypesOf(classOf[UserMapper]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers) || c.isMemberClass) - .filterNot(c ⇒ c == classOf[MultiUserMapperSrv]) - .foreach(mapperCls ⇒ ssoMapperBindings.addBinding.to(mapperCls)) + .filterNot(c => Modifier.isAbstract(c.getModifiers) || c.isMemberClass) + .filterNot(c => c == classOf[MultiUserMapperSrv]) + .foreach(mapperCls => ssoMapperBindings.addBinding.to(mapperCls)) bind[MigrationOperations].to[Migration] bind[AuthSrv].to[TheHiveAuthSrv] diff --git a/thehive-backend/app/models/Alert.scala b/thehive-backend/app/models/Alert.scala index f45c45db98..58165c3ad6 100644 --- a/thehive-backend/app/models/Alert.scala +++ b/thehive-backend/app/models/Alert.scala @@ -5,7 +5,18 @@ import java.util.Date import javax.inject.Singleton import models.JsonFormat.alertStatusFormat import org.elastic4play.controllers.JsonInputValue -import org.elastic4play.models.{Attribute, AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, MultiAttributeFormat, OptionalAttributeFormat, AttributeFormat => F, AttributeOption => O} +import org.elastic4play.models.{ + Attribute, + AttributeDef, + BaseEntity, + EntityDef, + HiveEnumeration, + ModelDef, + MultiAttributeFormat, + 
OptionalAttributeFormat, + AttributeFormat => F, + AttributeOption => O +} import org.elastic4play.utils.Hasher import org.elastic4play.{AttributeCheckingError, InvalidFormatAttributeError} import play.api.Logger @@ -21,7 +32,7 @@ object AlertStatus extends Enumeration with HiveEnumeration { } trait AlertAttributes { - _: AttributeDef ⇒ + _: AttributeDef => val artifactAttributes: Seq[Attribute[_]] = { val remoteAttachmentAttributes = Seq( @@ -71,12 +82,12 @@ class AlertModel extends ModelDef[AlertModel, Alert]("alert", "Alert", "/alert") private[AlertModel] lazy val logger = Logger(getClass) override val defaultSortBy: Seq[String] = Seq("-date") - override val removeAttribute: JsObject = Json.obj("status" → AlertStatus.Ignored) + override val removeAttribute: JsObject = Json.obj("status" -> AlertStatus.Ignored) override val computedMetrics: Map[String, String] = Map( - "observableCount" → "if (params._source.containsKey('artifacts')) { params._source['artifacts'].size() } else 0", - "handlingDurationInSeconds" → "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / 1000", - "handlingDurationInHours" → "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / 3600000", - "handlingDurationInDays" → "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / (3600000 * 24)" + "observableCount" -> "if (params._source.containsKey('artifacts')) { params._source['artifacts'].size() } else 0", + "handlingDurationInSeconds" -> "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / 1000", + "handlingDurationInHours" -> "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / 3600000", + "handlingDurationInDays" -> "(doc['updatedAt'].date.getMillis() - doc['createdAt'].date.getMillis()) / (3600000 * 24)" ) override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = { @@ -84,11 +95,11 @@ class AlertModel extends ModelDef[AlertModel, Alert]("alert", "Alert", "/alert") val missingDataErrors = (attrs \ "artifacts") .asOpt[Seq[JsValue]] .getOrElse(Nil) - .filter { a ⇒ + .filter { a => ((a \ "data").toOption.isEmpty && (a \ "attachment").toOption.isEmpty && (a \ "remoteAttachment").toOption.isEmpty) || ((a \ "tags").toOption.isEmpty && (a \ "message").toOption.isEmpty) } - .map(v ⇒ InvalidFormatAttributeError("artifacts", "artifact", JsonInputValue(v))) + .map(v => InvalidFormatAttributeError("artifacts", "artifact", JsonInputValue(v))) if (missingDataErrors.nonEmpty) Future.failed(AttributeCheckingError("alert", missingDataErrors)) else @@ -101,7 +112,7 @@ class AlertModel extends ModelDef[AlertModel, Alert]("alert", "Alert", "/alert") val source = (attrs \ "source").asOpt[String].getOrElse("") val sourceRef = (attrs \ "sourceRef").asOpt[String].getOrElse("") val _id = hasher.fromString(s"$tpe|$source|$sourceRef").head.toString() - attrs + ("_id" → JsString(_id)) + attrs + ("_id" -> JsString(_id)) } } } @@ -111,23 +122,23 @@ class Alert(model: AlertModel, attributes: JsObject) extends EntityDef[AlertMode override def toJson: JsObject = super.toJson + - ("artifacts" → JsArray(artifacts().map { + ("artifacts" -> JsArray(artifacts().map { // for file artifact, parse data as Json - case a if (a \ "dataType").asOpt[String].contains("file") ⇒ - Try(a + ("data" → Json.parse((a \ "data").as[String]))).getOrElse(a) - case a ⇒ a + case a if (a \ "dataType").asOpt[String].contains("file") => + Try(a + ("data" -> Json.parse((a \ "data").as[String]))).getOrElse(a) + case a => a })) def toCaseJson: 
JsObject = Json.obj( //"caseId" → caseId, - "title" → title(), - "description" → description(), - "severity" → severity(), + "title" -> title(), + "description" -> description(), + "severity" -> severity(), //"owner" → owner, - "startDate" → date(), - "tags" → tags(), - "tlp" → tlp(), - "status" → CaseStatus.Open + "startDate" -> date(), + "tags" -> tags(), + "tlp" -> tlp(), + "status" -> CaseStatus.Open ) } diff --git a/thehive-backend/app/models/Artifact.scala b/thehive-backend/app/models/Artifact.scala index beb747afaa..306736763f 100644 --- a/thehive-backend/app/models/Artifact.scala +++ b/thehive-backend/app/models/Artifact.scala @@ -18,7 +18,7 @@ import akka.{Done, NotUsed} import models.JsonFormat.artifactStatusFormat import services.{ArtifactSrv, AuditedModel} -import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F, AttributeOption => O} import org.elastic4play.services.{Attachment, AttachmentSrv, DBLists} import org.elastic4play.utils.MultiHash import org.elastic4play.{BadRequestError, InternalError} @@ -28,7 +28,7 @@ object ArtifactStatus extends Enumeration with HiveEnumeration { val Ok, Deleted = Value } -trait ArtifactAttributes { _: AttributeDef ⇒ +trait ArtifactAttributes { _: AttributeDef => def dblists: DBLists val artifactId: A[String] = attribute("_id", F.stringFmt, "Artifact id", O.model) val data: A[Option[String]] = optionalAttribute("data", F.stringFmt, "Content of the artifact", O.readonly) @@ -56,11 +56,11 @@ class ArtifactModel @Inject()( with ArtifactAttributes with AuditedModel { private[ArtifactModel] lazy val logger = Logger(getClass) - override val removeAttribute: JsObject = Json.obj("status" → ArtifactStatus.Deleted) + override val removeAttribute: JsObject = Json.obj("status" -> ArtifactStatus.Deleted) override def apply(attributes: JsObject): Artifact = { val tags = (attributes \ "tags").asOpt[Seq[JsString]].getOrElse(Nil).distinct - new Artifact(this, attributes + ("tags" → JsArray(tags))) + new Artifact(this, attributes + ("tags" -> JsArray(tags))) } // this method modify request in order to hash artifact and manager file upload @@ -70,23 +70,23 @@ class ArtifactModel @Inject()( throw BadRequestError(s"Artifact must contain a message or on ore more tags") if (keys.contains("data") == keys.contains("attachment")) throw BadRequestError(s"Artifact must contain data or attachment (but not both)") - computeId(parent.getOrElse(throw InternalError(s"artifact $attrs has no parent")), attrs).map { id ⇒ - attrs + ("_id" → JsString(id)) + computeId(parent.getOrElse(throw InternalError(s"artifact $attrs has no parent")), attrs).map { id => + attrs + ("_id" -> JsString(id)) } } override def updateHook(entity: BaseEntity, updateAttrs: JsObject): Future[JsObject] = entity match { - case artifact: Artifact ⇒ + case artifact: Artifact => val removeMessage = (updateAttrs \ "message").toOption.exists { - case JsNull ⇒ true - case JsArray(Seq()) ⇒ true - case _ ⇒ false + case JsNull => true + case JsArray(Seq()) => true + case _ => false } val removeTags = (updateAttrs \ "tags").toOption.exists { - case JsNull ⇒ true - case JsArray(Seq()) ⇒ true - case _ ⇒ false + case JsNull => true + case JsArray(Seq()) => true + case _ => false } if ((removeMessage && removeTags) || (removeMessage && artifact.tags().isEmpty) || @@ -102,12 +102,12 @@ class ArtifactModel @Inject()( 
mm.addValue((attrs \ "data").asOpt[JsValue].getOrElse(JsNull)) mm.addValue((attrs \ "dataType").asOpt[JsValue].getOrElse(JsNull)) for { - IOResult(_, _) ← (attrs \ "attachment" \ "filepath") + IOResult(_, _) <- (attrs \ "attachment" \ "filepath") .asOpt[String] - .fold(Future.successful(IOResult(0, Success(Done))))(file ⇒ mm.addFile(file)) - _ ← (attrs \ "attachment" \ "id") + .fold(Future.successful(IOResult(0, Success(Done))))(file => mm.addFile(file)) + _ <- (attrs \ "attachment" \ "id") .asOpt[String] - .fold(Future.successful(NotUsed: NotUsed)) { fileId ⇒ + .fold(Future.successful(NotUsed: NotUsed)) { fileId => mm.addFile(attachmentSrv.source(fileId)) } } yield { @@ -118,17 +118,17 @@ class ArtifactModel @Inject()( override def getStats(entity: BaseEntity): Future[JsObject] = entity match { - case artifact: Artifact ⇒ + case artifact: Artifact => val (similarArtifacts, total) = artifactSrv.get.findSimilar(artifact, Some("0-1"), Seq("-ioc")) for { - ioc ← similarArtifacts.runWith(Sink.headOption).map(_.fold(false)(_.ioc())) - t ← total - } yield Json.obj("seen" → t, "ioc" → ioc) - case _ ⇒ Future.successful(JsObject.empty) + ioc <- similarArtifacts.runWith(Sink.headOption).map(_.fold(false)(_.ioc())) + t <- total + } yield Json.obj("seen" -> t, "ioc" -> ioc) + case _ => Future.successful(JsObject.empty) } } class Artifact(model: ArtifactModel, attributes: JsObject) extends EntityDef[ArtifactModel, Artifact](model, attributes) with ArtifactAttributes { def dblists: DBLists = model.dblists - override def toJson: JsObject = super.toJson + ("reports" → Json.parse(reports())) // FIXME is parse fails (invalid report) + override def toJson: JsObject = super.toJson + ("reports" -> Json.parse(reports())) // FIXME is parse fails (invalid report) } diff --git a/thehive-backend/app/models/AttributeFormat.scala b/thehive-backend/app/models/AttributeFormat.scala index 6bf2ad96ac..e435c1ae17 100644 --- a/thehive-backend/app/models/AttributeFormat.scala +++ b/thehive-backend/app/models/AttributeFormat.scala @@ -26,22 +26,22 @@ object SeverityAttributeFormat extends NumberAttributeFormat { override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match { - case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) ⇒ Good(value) - case _ ⇒ formatError(JsonInputValue(value)) + case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) => Good(value) + case _ => formatError(JsonInputValue(value)) } override def fromInputValue(subNames: Seq[String], value: InputValue): Long Or Every[AttributeError] = value match { - case StringInputValue(Seq(v)) if subNames.isEmpty ⇒ + case StringInputValue(Seq(v)) if subNames.isEmpty => try { val longValue = v.toLong if (isValidValue(longValue)) Good(longValue) else formatError(value) } catch { - case _: Throwable ⇒ formatError(value) + case _: Throwable => formatError(value) } - case JsonInputValue(JsNumber(v)) ⇒ Good(v.longValue) - case _ ⇒ formatError(value) + case JsonInputValue(JsNumber(v)) => Good(v.longValue) + case _ => formatError(value) } } @@ -61,21 +61,21 @@ object TlpAttributeFormat extends NumberAttributeFormat { ) override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match { - case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) ⇒ Good(value) - case _ ⇒ formatError(JsonInputValue(value)) + case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) => Good(value) + case _ => formatError(JsonInputValue(value)) } 
override def fromInputValue(subNames: Seq[String], value: InputValue): Long Or Every[AttributeError] = value match { - case StringInputValue(Seq(v)) if subNames.isEmpty ⇒ + case StringInputValue(Seq(v)) if subNames.isEmpty => try { val longValue = v.toLong if (isValidValue(longValue)) Good(longValue) else formatError(value) } catch { - case _: Throwable ⇒ formatError(value) + case _: Throwable => formatError(value) } - case JsonInputValue(JsNumber(v)) ⇒ Good(v.longValue) - case _ ⇒ formatError(value) + case JsonInputValue(JsNumber(v)) => Good(v.longValue) + case _ => formatError(value) } } diff --git a/thehive-backend/app/models/Audit.scala b/thehive-backend/app/models/Audit.scala index 5aa0f32b5a..3fe227757b 100644 --- a/thehive-backend/app/models/Audit.scala +++ b/thehive-backend/app/models/Audit.scala @@ -24,12 +24,12 @@ import org.elastic4play.models.{ ObjectAttributeFormat, OptionalAttributeFormat, StringAttributeFormat, - AttributeOption ⇒ O + AttributeOption => O } import org.elastic4play.services.{AuditableAction, AuxSrv} import org.elastic4play.services.JsonFormat.auditableActionFormat -trait AuditAttributes { _: AttributeDef ⇒ +trait AuditAttributes { _: AttributeDef => def detailsAttributes: Seq[Attribute[_]] val operation: A[AuditableAction.Value] = attribute("operation", AttributeFormat.enumFmt(AuditableAction), "Operation", O.readonly) @@ -62,14 +62,14 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel], def mergeAttributeFormat(context: String, format1: AttributeFormat[_], format2: AttributeFormat[_]): Option[AttributeFormat[_]] = (format1, format2) match { - case (OptionalAttributeFormat(f1), f2) ⇒ mergeAttributeFormat(context, f1, f2) - case (f1, OptionalAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2) - case (MultiAttributeFormat(f1), MultiAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2).map(MultiAttributeFormat(_)) - case (f1, EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_)) ⇒ mergeAttributeFormat(context, f1, StringAttributeFormat) - case (EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_), f2) ⇒ mergeAttributeFormat(context, StringAttributeFormat, f2) - case (ObjectAttributeFormat(subAttributes1), ObjectAttributeFormat(subAttributes2)) ⇒ mergeAttributes(context, subAttributes1 ++ subAttributes2) - case (f1, f2) if f1 == f2 ⇒ Some(f1) - case (f1, f2) ⇒ + case (OptionalAttributeFormat(f1), f2) => mergeAttributeFormat(context, f1, f2) + case (f1, OptionalAttributeFormat(f2)) => mergeAttributeFormat(context, f1, f2) + case (MultiAttributeFormat(f1), MultiAttributeFormat(f2)) => mergeAttributeFormat(context, f1, f2).map(MultiAttributeFormat(_)) + case (f1, EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_)) => mergeAttributeFormat(context, f1, StringAttributeFormat) + case (EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_), f2) => mergeAttributeFormat(context, StringAttributeFormat, f2) + case (ObjectAttributeFormat(subAttributes1), ObjectAttributeFormat(subAttributes2)) => mergeAttributes(context, subAttributes1 ++ subAttributes2) + case (f1, f2) if f1 == f2 => Some(f1) + case (f1, f2) => logger.warn(s"Attribute $f1 != $f2") None @@ -79,22 +79,22 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel], val mergeAttributes: Iterable[Option[Attribute[_]]] = attributes .groupBy(_.attributeName) .map { - case (_name, _attributes) ⇒ + case (_name, _attributes) => _attributes - .map(a ⇒ Some(a.format)) + .map(a => Some(a.format)) 
.reduce[Option[AttributeFormat[_]]] { - case (Some(f1), Some(f2)) ⇒ mergeAttributeFormat(context + "." + _name, f1, f2) - case _ ⇒ None + case (Some(f1), Some(f2)) => mergeAttributeFormat(context + "." + _name, f1, f2) + case _ => None } .map { - case oaf: OptionalAttributeFormat[_] ⇒ oaf: AttributeFormat[_] - case maf: MultiAttributeFormat[_] ⇒ maf: AttributeFormat[_] - case f ⇒ OptionalAttributeFormat(f): AttributeFormat[_] + case oaf: OptionalAttributeFormat[_] => oaf: AttributeFormat[_] + case maf: MultiAttributeFormat[_] => maf: AttributeFormat[_] + case f => OptionalAttributeFormat(f): AttributeFormat[_] } - .map(format ⇒ Attribute("audit", _name, format, Nil, None, "")) + .map(format => Attribute("audit", _name, format, Nil, None, "")) .orElse { logger.error( - s"Mapping is not consistent on attribute $context:\n${_attributes.map(a ⇒ a.modelName + "/" + a.attributeName + ": " + a.format.name).mkString("\n")}" + s"Mapping is not consistent on attribute $context:\n${_attributes.map(a => a.modelName + "/" + a.attributeName + ": " + a.format.name).mkString("\n")}" ) None } @@ -111,21 +111,21 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel], "audit", auditedModels .flatMap(_.attributes) - .filter(a ⇒ a.isModel && !a.isUnaudited) + .filter(a => a.isModel && !a.isUnaudited) .toSeq ).map(_.subAttributes) .getOrElse(Nil) override def getStats(entity: BaseEntity): Future[JsObject] = entity match { - case audit: Audit ⇒ + case audit: Audit => auxSrv(audit.objectType(), audit.objectId(), 10, withStats = false, removeUnaudited = true) .recover { - case t ⇒ + case t => logger.error("Audit stats failure", t) JsObject.empty } - case other ⇒ + case other => logger.warn(s"Request caseStats from a non-case entity ?! ${other.getClass}:$other") Future.successful(JsObject.empty) } diff --git a/thehive-backend/app/models/Case.scala b/thehive-backend/app/models/Case.scala index 450d471d3d..6b39e5eb07 100644 --- a/thehive-backend/app/models/Case.scala +++ b/thehive-backend/app/models/Case.scala @@ -15,7 +15,7 @@ import models.JsonFormat.{caseImpactStatusFormat, caseResolutionStatusFormat, ca import services.{AuditedModel, CaseSrv} import org.elastic4play.JsonFormat.dateFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O} import org.elastic4play.services.{FindSrv, SequenceSrv} object CaseStatus extends Enumeration with HiveEnumeration { @@ -33,7 +33,7 @@ object CaseImpactStatus extends Enumeration with HiveEnumeration { val NoImpact, WithImpact, NotApplicable = Value } -trait CaseAttributes { _: AttributeDef ⇒ +trait CaseAttributes { _: AttributeDef => val caseId: A[Long] = attribute("caseId", F.numberFmt, "Id of the case (auto-generated)", O.model) val title: A[String] = attribute("title", F.textFmt, "Title of the case") val description: A[String] = attribute("description", F.textFmt, "Description of the case") @@ -68,7 +68,7 @@ class CaseModel @Inject()( implicit val ec: ExecutionContext ) extends ModelDef[CaseModel, Case]("case", "Case", "/case") with CaseAttributes - with AuditedModel { caseModel ⇒ + with AuditedModel { caseModel => private lazy val logger = Logger(getClass) private lazy val artifactModel = artifactModelProvider.get @@ -77,29 +77,29 @@ class CaseModel @Inject()( private lazy val alertModel = alertModelProvider.get override 
val defaultSortBy = Seq("-startDate") - override val removeAttribute: JsObject = Json.obj("status" → CaseStatus.Deleted) + override val removeAttribute: JsObject = Json.obj("status" -> CaseStatus.Deleted) override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = - sequenceSrv("case").map { caseId ⇒ + sequenceSrv("case").map { caseId => attrs + - ("caseId" → JsNumber(caseId)) + - ("owner" → (attrs \ "owner").asOpt[String].fold[JsValue](JsNull)(o ⇒ JsString(o.toLowerCase()))) + ("caseId" -> JsNumber(caseId)) + + ("owner" -> (attrs \ "owner").asOpt[String].fold[JsValue](JsNull)(o => JsString(o.toLowerCase()))) } private def updateStatus(updateAttrs: JsObject): JsObject = (updateAttrs \ "status").asOpt[CaseStatus.Type] match { - case Some(CaseStatus.Resolved) if !updateAttrs.keys.contains("endDate") ⇒ + case Some(CaseStatus.Resolved) if !updateAttrs.keys.contains("endDate") => updateAttrs + - ("endDate" → Json.toJson(new Date)) + - ("flag" → JsFalse) - case Some(CaseStatus.Open) ⇒ - updateAttrs + ("endDate" → JsArray(Nil)) - case _ ⇒ + ("endDate" -> Json.toJson(new Date)) + + ("flag" -> JsFalse) + case Some(CaseStatus.Open) => + updateAttrs + ("endDate" -> JsArray(Nil)) + case _ => updateAttrs } private def lowercaseOwner(updateAttrs: JsObject): JsObject = - (updateAttrs \ "owner").asOpt[String].fold(updateAttrs)(o ⇒ updateAttrs + ("owner" → JsString(o.toLowerCase))) + (updateAttrs \ "owner").asOpt[String].fold(updateAttrs)(o => updateAttrs + ("owner" -> JsString(o.toLowerCase))) override def updateHook(entity: BaseEntity, updateAttrs: JsObject): Future[JsObject] = Future.successful(lowercaseOwner(updateStatus(updateAttrs))) @@ -107,86 +107,86 @@ class CaseModel @Inject()( private[models] def buildArtifactStats(caze: Case): Future[JsObject] = { import org.elastic4play.services.QueryDSL._ findSrv(artifactModel, and(parent("case", withId(caze.id)), "status" ~= "Ok"), selectCount) - .map { artifactStats ⇒ - Json.obj("artifacts" → artifactStats) + .map { artifactStats => + Json.obj("artifacts" -> artifactStats) } } private[models] def buildTaskStats(caze: Case): Future[JsObject] = { import org.elastic4play.services.QueryDSL._ findSrv(taskModel, and(parent("case", withId(caze.id)), "status" in ("Waiting", "InProgress", "Completed")), groupByField("status", selectCount)) - .map { taskStatsJson ⇒ + .map { taskStatsJson => val (taskCount, taskStats) = taskStatsJson.value.foldLeft((0L, JsObject.empty)) { - case ((total, s), (key, value)) ⇒ + case ((total, s), (key, value)) => val count = (value \ "count").as[Long] - (total + count, s + (key → JsNumber(count))) + (total + count, s + (key -> JsNumber(count))) } - Json.obj("tasks" → (taskStats + ("total" → JsNumber(taskCount)))) + Json.obj("tasks" -> (taskStats + ("total" -> JsNumber(taskCount)))) } } private[models] def buildMergeIntoStats(caze: Case): Future[JsObject] = caze .mergeInto() - .fold(Future.successful(JsObject.empty)) { mergeCaseId ⇒ + .fold(Future.successful(JsObject.empty)) { mergeCaseId => caseSrv .get(mergeCaseId) - .map { c ⇒ - Json.obj("mergeInto" → Json.obj("caseId" → c.caseId(), "title" → c.title())) + .map { c => + Json.obj("mergeInto" -> Json.obj("caseId" -> c.caseId(), "title" -> c.title())) } .recover { - case _ ⇒ Json.obj("mergeInto" → Json.obj("caseId" → "", "title" → "")) + case _ => Json.obj("mergeInto" -> Json.obj("caseId" -> "", "title" -> "")) } } private[models] def buildMergeFromStats(caze: Case): Future[JsObject] = Future - .traverse(caze.mergeFrom()) { id ⇒ + .traverse(caze.mergeFrom()) 
{ id => caseSrv .get(id) - .map { c ⇒ - Json.obj("caseId" → c.caseId(), "title" → c.title()) + .map { c => + Json.obj("caseId" -> c.caseId(), "title" -> c.title()) } .recover { - case _ ⇒ Json.obj("caseId" → "", "title" → "") + case _ => Json.obj("caseId" -> "", "title" -> "") } } .map { - case mf if mf.nonEmpty ⇒ Json.obj("mergeFrom" → mf) - case _ ⇒ JsObject.empty + case mf if mf.nonEmpty => Json.obj("mergeFrom" -> mf) + case _ => JsObject.empty } private[models] def buildAlertStats(caze: Case): Future[JsObject] = { import org.elastic4play.services.QueryDSL._ findSrv(alertModel, "case" ~= caze.id, groupByField("type", groupByField("source", selectCount))) - .map { alertStatsJson ⇒ + .map { alertStatsJson => val alertStats = for { - (tpe, JsObject(srcStats)) ← alertStatsJson.value - src ← srcStats.keys - } yield Json.obj("type" → tpe, "source" → src) - Json.obj("alerts" → alertStats) + (tpe, JsObject(srcStats)) <- alertStatsJson.value + src <- srcStats.keys + } yield Json.obj("type" -> tpe, "source" -> src) + Json.obj("alerts" -> alertStats) } } override def getStats(entity: BaseEntity): Future[JsObject] = entity match { - case caze: Case ⇒ + case caze: Case => for { - taskStats ← buildTaskStats(caze) - artifactStats ← buildArtifactStats(caze) - alertStats ← buildAlertStats(caze) - mergeIntoStats ← buildMergeIntoStats(caze) - mergeFromStats ← buildMergeFromStats(caze) + taskStats <- buildTaskStats(caze) + artifactStats <- buildArtifactStats(caze) + alertStats <- buildAlertStats(caze) + mergeIntoStats <- buildMergeIntoStats(caze) + mergeFromStats <- buildMergeFromStats(caze) } yield taskStats ++ artifactStats ++ alertStats ++ mergeIntoStats ++ mergeFromStats - case other ⇒ + case other => logger.warn(s"Request caseStats from a non-case entity ?! ${other.getClass}:$other") Future.successful(JsObject.empty) } override val computedMetrics = Map( - "handlingDurationInSeconds" → "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / 1000", - "handlingDurationInHours" → "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / 3600000", - "handlingDurationInDays" → "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / (3600000 * 24)" + "handlingDurationInSeconds" -> "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / 1000", + "handlingDurationInHours" -> "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / 3600000", + "handlingDurationInDays" -> "(doc['endDate'].date.getMillis() - doc['startDate'].date.getMillis()) / (3600000 * 24)" ) } diff --git a/thehive-backend/app/models/CaseTemplate.scala b/thehive-backend/app/models/CaseTemplate.scala index 36d941f9ee..fdd9eaa509 100644 --- a/thehive-backend/app/models/CaseTemplate.scala +++ b/thehive-backend/app/models/CaseTemplate.scala @@ -6,20 +6,20 @@ import play.api.libs.json.{JsObject, JsValue} import models.JsonFormat.caseTemplateStatusFormat -import org.elastic4play.models.{Attribute, AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F} +import org.elastic4play.models.{Attribute, AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F} object CaseTemplateStatus extends Enumeration with HiveEnumeration { type Type = Value val Ok, Deleted = Value } -trait CaseTemplateAttributes { _: AttributeDef ⇒ +trait CaseTemplateAttributes { _: AttributeDef => def taskAttributes: Seq[Attribute[_]] val templateName: A[String] = attribute("name", F.stringFmt, "Name of the template") val titlePrefix: A[Option[String]] = 
optionalAttribute("titlePrefix", F.textFmt, "Title of the case") val description: A[Option[String]] = optionalAttribute("description", F.textFmt, "Description of the case") - val severity: A[Option[Long]] = optionalAttribute("severity", SeverityAttributeFormat, "Severity if the case is an incident (1-4)") + val severity: A[Option[Long]] = optionalAttribute("severity", SeverityAttributeFormat, "Severity if the case is an incident (1-4)") val tags: A[Seq[String]] = multiAttribute("tags", F.stringFmt, "Case tags") val flag: A[Option[Boolean]] = optionalAttribute("flag", F.booleanFmt, "Flag of the case") val tlp: A[Option[Long]] = optionalAttribute("tlp", TlpAttributeFormat, "TLP level") diff --git a/thehive-backend/app/models/Dashboard.scala b/thehive-backend/app/models/Dashboard.scala index 7a8ae83aec..a5f54ff7fa 100644 --- a/thehive-backend/app/models/Dashboard.scala +++ b/thehive-backend/app/models/Dashboard.scala @@ -8,14 +8,14 @@ import play.api.libs.json._ import models.JsonFormat.dashboardStatusFormat -import org.elastic4play.models.{AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F} +import org.elastic4play.models.{AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F} object DashboardStatus extends Enumeration with HiveEnumeration { type Type = Value val Private, Shared, Deleted = Value } -trait DashboardAttributes { _: AttributeDef ⇒ +trait DashboardAttributes { _: AttributeDef => val title: A[String] = attribute("title", F.textFmt, "Title of the dashboard") val description: A[String] = attribute("description", F.textFmt, "Description of the dashboard") val status: A[DashboardStatus.Value] = attribute("status", F.enumFmt(DashboardStatus), "Status of the case", DashboardStatus.Private) @@ -24,10 +24,10 @@ trait DashboardAttributes { _: AttributeDef ⇒ @Singleton class DashboardModel @Inject()() extends ModelDef[DashboardModel, Dashboard]("dashboard", "Dashboard", "/dashboard") with DashboardAttributes { - dashboardModel ⇒ + dashboardModel => private[DashboardModel] lazy val logger = Logger(getClass) - override val removeAttribute: JsObject = Json.obj("status" → DashboardStatus.Deleted) + override val removeAttribute: JsObject = Json.obj("status" -> DashboardStatus.Deleted) } class Dashboard(model: DashboardModel, attributes: JsObject) extends EntityDef[DashboardModel, Dashboard](model, attributes) with DashboardAttributes diff --git a/thehive-backend/app/models/JsonFormat.scala b/thehive-backend/app/models/JsonFormat.scala index c698ecb4c2..3cb6f9fa14 100644 --- a/thehive-backend/app/models/JsonFormat.scala +++ b/thehive-backend/app/models/JsonFormat.scala @@ -19,12 +19,12 @@ object JsonFormat { implicit val alertStatusFormat: Format[AlertStatus.Type] = enumFormat(AlertStatus) implicit val dashboardStatusFormat: Format[DashboardStatus.Type] = enumFormat(DashboardStatus) - implicit val pathWrites: Writes[Path] = Writes((value: Path) ⇒ JsString(value.toString)) + implicit val pathWrites: Writes[Path] = Writes((value: Path) => JsString(value.toString)) - private val roleWrites: Writes[Role] = Writes((role: Role) ⇒ JsString(role.name.toLowerCase())) + private val roleWrites: Writes[Role] = Writes((role: Role) => JsString(role.name.toLowerCase())) private val roleReads: Reads[Role] = Reads { - case JsString(s) if Roles.isValid(s) ⇒ JsSuccess(Roles.withName(s).get) - case _ ⇒ JsError(Seq(JsPath → Seq(JsonValidationError(s"error.expected.role(${Roles.roleNames}")))) + case JsString(s) if Roles.isValid(s) => JsSuccess(Roles.withName(s).get) + case _ => 
JsError(Seq(JsPath -> Seq(JsonValidationError(s"error.expected.role(${Roles.roleNames}")))) } implicit val roleFormat: Format[Role] = Format[Role](roleReads, roleWrites) } diff --git a/thehive-backend/app/models/Log.scala b/thehive-backend/app/models/Log.scala index b8e77d5d2e..a21fc5b68b 100644 --- a/thehive-backend/app/models/Log.scala +++ b/thehive-backend/app/models/Log.scala @@ -9,14 +9,14 @@ import play.api.libs.json.{JsObject, Json} import models.JsonFormat.logStatusFormat import services.AuditedModel -import org.elastic4play.models.{AttributeDef, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F, AttributeOption => O} object LogStatus extends Enumeration with HiveEnumeration { type Type = Value val Ok, Deleted = Value } -trait LogAttributes { _: AttributeDef ⇒ +trait LogAttributes { _: AttributeDef => val message = attribute("message", F.textFmt, "Message") val startDate = attribute("startDate", F.dateFmt, "Timestamp of the comment", new Date) // attachment is stored as JsObject containing : @@ -36,6 +36,6 @@ class LogModel @Inject()(taskModel: TaskModel) with LogAttributes with AuditedModel { override val defaultSortBy: Seq[String] = Seq("-startDate") - override val removeAttribute: JsObject = Json.obj("status" → LogStatus.Deleted) + override val removeAttribute: JsObject = Json.obj("status" -> LogStatus.Deleted) } class Log(model: LogModel, attributes: JsObject) extends EntityDef[LogModel, Log](model, attributes) with LogAttributes diff --git a/thehive-backend/app/models/Migration.scala b/thehive-backend/app/models/Migration.scala index febdd6e875..d07e0bf871 100644 --- a/thehive-backend/app/models/Migration.scala +++ b/thehive-backend/app/models/Migration.scala @@ -79,38 +79,38 @@ class Migration( private def addDataTypes(dataTypes: Seq[String]): Future[Unit] = { val dataTypeList = dblists.apply("list_artifactDataType") Future - .traverse(dataTypes) { dt ⇒ + .traverse(dataTypes) { dt => dataTypeList .addItem(dt) - .map(_ ⇒ ()) + .map(_ => ()) .recover { - case _: ConflictError ⇒ - case error ⇒ logger.error(s"Failed to add dataType $dt during migration", error) + case _: ConflictError => + case error => logger.error(s"Failed to add dataType $dt during migration", error) } } - .map(_ ⇒ ()) + .map(_ => ()) } private def addDashboards(version: Int): Future[Unit] = dashboardSrv.find(QueryDSL.any, Some("0-0"), Nil)._2.flatMap { - case 0 ⇒ - userSrv.inInitAuthContext { implicit authContext ⇒ + case 0 => + userSrv.inInitAuthContext { implicit authContext => val dashboardsPath = environment.rootPath.toPath.resolve("migration").resolve("12").resolve("dashboards") val dashboards = for { - dashboardFile ← Try(Files.newDirectoryStream(dashboardsPath, "*.json").asScala).getOrElse(Nil) + dashboardFile <- Try(Files.newDirectoryStream(dashboardsPath, "*.json").asScala).getOrElse(Nil) if Files.isReadable(dashboardFile) - dashboardJson ← Try(readJsonFile(dashboardFile).as[JsObject]).toOption + dashboardJson <- Try(readJsonFile(dashboardFile).as[JsObject]).toOption dashboardDefinition = (dashboardJson \ "definition").as[JsValue].toString dash = dashboardSrv - .create(Fields(dashboardJson + ("definition" → JsString(dashboardDefinition)))) - .map(_ ⇒ ()) + .create(Fields(dashboardJson + ("definition" -> JsString(dashboardDefinition)))) + .map(_ => ()) .recover { - case error ⇒ logger.error(s"Failed to create dashboard $dashboardFile during migration", 
error) + case error => logger.error(s"Failed to create dashboard $dashboardFile during migration", error) } } yield dash - Future.sequence(dashboards).map(_ ⇒ ()) + Future.sequence(dashboards).map(_ => ()) } - case _ ⇒ Future.successful(()) + case _ => Future.successful(()) } override def endMigration(version: Int): Future[Unit] = { @@ -137,40 +137,40 @@ class Migration( "file", "autonomous-system" ) - ).andThen { case _ ⇒ addDashboards(version + 1) } + ).andThen { case _ => addDashboards(version + 1) } } override val operations: PartialFunction[DatabaseState, Seq[Operation]] = { - case DatabaseState(version) if version < 7 ⇒ Nil - case DatabaseState(7) ⇒ + case DatabaseState(version) if version < 7 => Nil + case DatabaseState(7) => Seq( renameAttribute("reportTemplate", "analyzerId", "analyzers"), // reportTemplate refers only one analyzer renameAttribute("reportTemplate", "reportType", "flavor"), // rename flavor into reportType removeAttribute("case", "isIncident"), // this information is now stored in resolutionStatus - mapEntity("case") { c ⇒ // add case owner + mapEntity("case") { c => // add case owner val owner = (c \ "createdBy") .asOpt[JsString] .getOrElse(JsString("init")) - c + ("owner" → owner) + c + ("owner" -> owner) }, - removeEntity("analyzer")(_ ⇒ true), // analyzer is now stored in cortex - addAttribute("case_artifact", "reports" → JsString("{}")), // add short reports in artifact - addAttribute("case_task", "order" → JsNumber(0)), // add task order - addAttribute("user", "preferences" → JsString("{}")), // add user preferences, default empty (Json object) + removeEntity("analyzer")(_ => true), // analyzer is now stored in cortex + addAttribute("case_artifact", "reports" -> JsString("{}")), // add short reports in artifact + addAttribute("case_task", "order" -> JsNumber(0)), // add task order + addAttribute("user", "preferences" -> JsString("{}")), // add user preferences, default empty (Json object) mapAttribute(Seq("case", "case_task", "case_task_log", "case_artifact", "audit", "case_artifact_job"), "startDate")(convertDate), mapAttribute(Seq("case", "case_task", "case_artifact_job"), "endDate")(convertDate), mapAttribute("misp", "date")(convertDate), mapAttribute("misp", "publishDate")(convertDate), - mapAttribute(_ ⇒ true, "createdAt", convertDate), - mapAttribute(_ ⇒ true, "updatedAt", convertDate) + mapAttribute(_ => true, "createdAt", convertDate), + mapAttribute(_ => true, "updatedAt", convertDate) ) - case DatabaseState(8) ⇒ + case DatabaseState(8) => requireUpdateMispAlertArtifact = true val hasher = Hasher("MD5") Seq( renameEntity("misp", "alert"), - mapEntity("alert") { misp ⇒ + mapEntity("alert") { misp => val eventId = (misp \ "eventId").as[Long].toString val date = (misp \ "date").as[Date] val mispTags = (misp \ "tags").asOpt[Seq[String]].getOrElse(Nil) @@ -178,37 +178,37 @@ class Migration( val tlp = mispTags .map(_.toLowerCase) .collectFirst { - case "tlp:white" ⇒ 0L - case "tlp:green" ⇒ 1L - case "tlp:amber" ⇒ 2L - case "tlp:red" ⇒ 3L + case "tlp:white" => 0L + case "tlp:green" => 1L + case "tlp:amber" => 2L + case "tlp:red" => 3L } .getOrElse(2L) val source = (misp \ "serverId").asOpt[String].getOrElse("") val _id = hasher.fromString(s"misp|$source|$eventId").head.toString() - (misp \ "caze").asOpt[JsString].fold(JsObject.empty)(c ⇒ Json.obj("caze" → c)) ++ + (misp \ "caze").asOpt[JsString].fold(JsObject.empty)(c => Json.obj("caze" -> c)) ++ Json.obj( - "_type" → "alert", - "_id" → _id, - "type" → "misp", - "source" → source, - "sourceRef" → eventId, - 
"date" → date, - "lastSyncDate" → (misp \ "publishDate").as[Date], - "title" → ("#" + eventId + " " + (misp \ "info").as[String]).trim, - "description" → s"Imported from MISP Event #$eventId, created at $date", - "severity" → (misp \ "threatLevel").as[JsNumber], - "tags" → tags, - "tlp" → tlp, - "artifacts" → JsArray(), - "caseTemplate" → mispCaseTemplate, - "status" → (misp \ "eventStatus").as[JsString], - "follow" → (misp \ "follow").as[JsBoolean] + "_type" -> "alert", + "_id" -> _id, + "type" -> "misp", + "source" -> source, + "sourceRef" -> eventId, + "date" -> date, + "lastSyncDate" -> (misp \ "publishDate").as[Date], + "title" -> ("#" + eventId + " " + (misp \ "info").as[String]).trim, + "description" -> s"Imported from MISP Event #$eventId, created at $date", + "severity" -> (misp \ "threatLevel").as[JsNumber], + "tags" -> tags, + "tlp" -> tlp, + "artifacts" -> JsArray(), + "caseTemplate" -> mispCaseTemplate, + "status" -> (misp \ "eventStatus").as[JsString], + "follow" -> (misp \ "follow").as[JsBoolean] ) }, - removeEntity("audit")(o ⇒ (o \ "objectType").asOpt[String].contains("alert")) + removeEntity("audit")(o => (o \ "objectType").asOpt[String].contains("alert")) ) - case ds @ DatabaseState(9) ⇒ + case ds @ DatabaseState(9) => object Base64 { def unapply(data: String): Option[Array[Byte]] = Try(java.util.Base64.getDecoder.decode(data)).toOption } @@ -229,152 +229,153 @@ class Migration( val extraHashers = Hasher(mainHash +: extraHashes: _*) Seq( // store alert attachment in datastore - Operation((f: String ⇒ Source[JsObject, NotUsed]) ⇒ { - case "alert" ⇒ + Operation((f: String => Source[JsObject, NotUsed]) => { + case "alert" => f("alert").flatMapConcat { - alert ⇒ + alert => val artifactsAndData = Future.traverse((alert \ "artifacts").asOpt[List[JsObject]].getOrElse(Nil)) { - artifact ⇒ + artifact => val isFile = (artifact \ "dataType").asOpt[String].contains("file") // get MISP attachment if (!isFile) - Future.successful(artifact → Nil) + Future.successful(artifact -> Nil) else { (for { - dataStr ← (artifact \ "data").asOpt[String] - dataJson ← Try(Json.parse(dataStr)).toOption - dataObj ← dataJson.asOpt[JsObject] - filename ← (dataObj \ "filename").asOpt[String].map(_.split("\\|").head) - attributeId ← (dataObj \ "attributeId").asOpt[String] - attributeType ← (dataObj \ "attributeType").asOpt[String] + dataStr <- (artifact \ "data").asOpt[String] + dataJson <- Try(Json.parse(dataStr)).toOption + dataObj <- dataJson.asOpt[JsObject] + filename <- (dataObj \ "filename").asOpt[String].map(_.split("\\|").head) + attributeId <- (dataObj \ "attributeId").asOpt[String] + attributeType <- (dataObj \ "attributeType").asOpt[String] } yield Future.successful( - (artifact - "data" + ("remoteAttachment" → Json - .obj("reference" → attributeId, "filename" → filename, "type" → attributeType))) → Nil + (artifact - "data" + ("remoteAttachment" -> Json + .obj("reference" -> attributeId, "filename" -> filename, "type" -> attributeType))) -> Nil )).orElse { (artifact \ "data") .asOpt[String] .collect { // get attachment encoded in data field - case AlertSrv.dataExtractor(filename, contentType, data @ Base64(rawData)) ⇒ + case AlertSrv.dataExtractor(filename, contentType, data @ Base64(rawData)) => val attachmentId = mainHasher.fromByteArray(rawData).head.toString() ds.getEntity(datastoreName, s"${attachmentId}_0") - .map(_ ⇒ Nil) + .map(_ => Nil) .recover { - case _ if containsOrAdd(attachmentId) ⇒ Nil - case _ ⇒ - Seq(Json.obj("_type" → datastoreName, "_id" → s"${attachmentId}_0", "data" → 
data)) + case _ if containsOrAdd(attachmentId) => Nil + case _ => + Seq(Json.obj("_type" -> datastoreName, "_id" -> s"${attachmentId}_0", "data" -> data)) } - .map { dataEntity ⇒ + .map { dataEntity => val attachment = Attachment(filename, extraHashers.fromByteArray(rawData), rawData.length.toLong, contentType, attachmentId) - (artifact - "data" + ("attachment" → Json.toJson(attachment))) → dataEntity + (artifact - "data" + ("attachment" -> Json.toJson(attachment))) -> dataEntity } } } - .getOrElse(Future.successful(artifact → Nil)) + .getOrElse(Future.successful(artifact -> Nil)) } } Source .future(artifactsAndData) - .mapConcat { ad ⇒ - val updatedAlert = alert + ("artifacts" → JsArray(ad.map(_._1))) + .mapConcat { ad => + val updatedAlert = alert + ("artifacts" -> JsArray(ad.map(_._1))) updatedAlert :: ad.flatMap(_._2) } } - case other ⇒ f(other) + case other => f(other) }), // Fix alert status mapAttribute("alert", "status") { - case JsString("Update") ⇒ JsString("Updated") - case JsString("Ignore") ⇒ JsString("Ignored") - case other ⇒ other + case JsString("Update") => JsString("Updated") + case JsString("Ignore") => JsString("Ignored") + case other => other }, // Fix double encode of metrics mapEntity("dblist") { - case dblist if (dblist \ "dblist").asOpt[String].contains("case_metrics") ⇒ - (dblist \ "value").asOpt[String].map(Json.parse).fold(dblist) { value ⇒ - dblist + ("value" → value) + case dblist if (dblist \ "dblist").asOpt[String].contains("case_metrics") => + (dblist \ "value").asOpt[String].map(Json.parse).fold(dblist) { value => + dblist + ("value" -> value) } - case other ⇒ other + case other => other }, // Add empty metrics and custom fields in cases - mapEntity("case") { caze ⇒ + mapEntity("case") { caze => val metrics = (caze \ "metrics").asOpt[JsObject].getOrElse(JsObject.empty) val customFields = (caze \ "customFields").asOpt[JsObject].getOrElse(JsObject.empty) - caze + ("metrics" → metrics) + ("customFields" → customFields) + caze + ("metrics" -> metrics) + ("customFields" -> customFields) } ) - case DatabaseState(10) ⇒ Nil - case DatabaseState(11) ⇒ + case DatabaseState(10) => Nil + case DatabaseState(11) => Seq( - mapEntity("case_task_log") { log ⇒ + mapEntity("case_task_log") { log => val owner = (log \ "createdBy").asOpt[JsString].getOrElse(JsString("init")) - log + ("owner" → owner) + log + ("owner" -> owner) }, - mapEntity(_ ⇒ true, entity ⇒ entity - "user"), - mapEntity("caseTemplate") { caseTemplate ⇒ + mapEntity(_ => true, entity => entity - "user"), + mapEntity("caseTemplate") { caseTemplate => val metricsName = (caseTemplate \ "metricNames").asOpt[Seq[String]].getOrElse(Nil) - val metrics = JsObject(metricsName.map(_ → JsNull)) - caseTemplate - "metricNames" + ("metrics" → metrics) + val metrics = JsObject(metricsName.map(_ -> JsNull)) + caseTemplate - "metricNames" + ("metrics" -> metrics) }, - addAttribute("case_artifact", "sighted" → JsFalse) + addAttribute("case_artifact", "sighted" -> JsFalse) ) - case ds @ DatabaseState(12) ⇒ + case ds @ DatabaseState(12) => Seq( // Remove alert artifacts in audit trail mapEntity("audit") { - case audit if (audit \ "objectType").asOpt[String].contains("alert") ⇒ - (audit \ "details").asOpt[JsObject].fold(audit) { details ⇒ - audit + ("details" → (details - "artifacts")) + case audit if (audit \ "objectType").asOpt[String].contains("alert") => + (audit \ "details").asOpt[JsObject].fold(audit) { details => + audit + ("details" -> (details - "artifacts")) } - case audit ⇒ audit + case audit => audit }, // Regenerate 
all alert ID - mapEntity("alert") { alert ⇒ + mapEntity("alert") { alert => val alertId = JsString(generateAlertId(alert)) - alert + ("_id" → alertId) + ("_routing" → alertId) + alert + ("_id" -> alertId) + ("_routing" -> alertId) }, // and overwrite alert id in audit trail - Operation((f: String ⇒ Source[JsObject, NotUsed]) ⇒ { - case "audit" ⇒ + Operation((f: String => Source[JsObject, NotUsed]) => { + case "audit" => f("audit").flatMapConcat { - case audit if (audit \ "objectType").asOpt[String].contains("alert") ⇒ - val updatedAudit = (audit \ "objectId").asOpt[String].fold(Future.successful(audit)) { alertId ⇒ + case audit if (audit \ "objectType").asOpt[String].contains("alert") => + val updatedAudit = (audit \ "objectId").asOpt[String].fold(Future.successful(audit)) { alertId => ds.getEntity("alert", alertId) - .map { alert ⇒ - audit + ("objectId" → JsString(generateAlertId(alert))) + .map { alert => + audit + ("objectId" -> JsString(generateAlertId(alert))) } .recover { - case e ⇒ + case e => logger.error(s"Get alert $alertId", e) audit } } Source.future(updatedAudit) - case audit ⇒ Source.single(audit) + case audit => Source.single(audit) } - case other ⇒ f(other) + case other => f(other) }) ) - case DatabaseState(13) ⇒ + case DatabaseState(13) => Seq( - addAttribute("alert", "customFields" → JsObject.empty), - addAttribute("case_task", "group" → JsString("default")), - addAttribute("case", "pap" → JsNumber(2)) + addAttribute("alert", "customFields" -> JsObject.empty), + addAttribute("case_task", "group" -> JsString("default")), + addAttribute("case", "pap" -> JsNumber(2)) ) - case DatabaseState(14) ⇒ + case DatabaseState(14) => Seq( - mapEntity("sequence") { seq ⇒ + mapEntity("sequence") { seq => val oldId = (seq \ "_id").as[String] val counter = (seq \ "counter").as[JsNumber] seq - "counter" - "_routing" + - ("_id" → JsString("sequence_" + oldId)) + - ("sequenceCounter" → counter) + ("_id" -> JsString("sequence_" + oldId)) + + ("sequenceCounter" -> counter) } ) - case DatabaseState(15) ⇒ Nil + case DatabaseState(15) => Nil + case DatabaseState(16) => Nil } private def generateAlertId(alert: JsObject): String = { diff --git a/thehive-backend/app/models/Roles.scala b/thehive-backend/app/models/Roles.scala index 9e6e574b31..52eaee8409 100644 --- a/thehive-backend/app/models/Roles.scala +++ b/thehive-backend/app/models/Roles.scala @@ -29,8 +29,8 @@ object Roles { object RoleAttributeFormat extends AttributeFormat[Role]("role") { override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match { - case JsString(v) if subNames.isEmpty && Roles.isValid(v) ⇒ Good(value) - case _ ⇒ formatError(JsonInputValue(value)) + case JsString(v) if subNames.isEmpty && Roles.isValid(v) => Good(value) + case _ => formatError(JsonInputValue(value)) } override def fromInputValue(subNames: Seq[String], value: InputValue): Role Or Every[AttributeError] = @@ -38,10 +38,10 @@ object RoleAttributeFormat extends AttributeFormat[Role]("role") { formatError(value) else (value match { - case StringInputValue(Seq(v)) ⇒ Good(v) - case JsonInputValue(JsString(v)) ⇒ Good(v) - case _ ⇒ formatError(value) - }).flatMap(v ⇒ Roles.withName(v).fold[Role Or Every[AttributeError]](formatError(value))(role ⇒ Good(role))) + case StringInputValue(Seq(v)) => Good(v) + case JsonInputValue(JsString(v)) => Good(v) + case _ => formatError(value) + }).flatMap(v => Roles.withName(v).fold[Role Or Every[AttributeError]](formatError(value))(role => Good(role))) override def 
elasticType(attributeName: String): KeywordField = keywordField(attributeName) } diff --git a/thehive-backend/app/models/Task.scala b/thehive-backend/app/models/Task.scala index beac6633e0..1f9918ec7e 100644 --- a/thehive-backend/app/models/Task.scala +++ b/thehive-backend/app/models/Task.scala @@ -12,7 +12,7 @@ import models.JsonFormat.taskStatusFormat import services.AuditedModel import org.elastic4play.JsonFormat.dateFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat ⇒ F} +import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F} import org.elastic4play.utils.RichJson object TaskStatus extends Enumeration with HiveEnumeration { @@ -20,7 +20,7 @@ object TaskStatus extends Enumeration with HiveEnumeration { val Waiting, InProgress, Completed, Cancel = Value } -trait TaskAttributes { _: AttributeDef ⇒ +trait TaskAttributes { _: AttributeDef => val title = attribute("title", F.textFmt, "Title of the task") val description = optionalAttribute("description", F.textFmt, "Task details") val owner = optionalAttribute("owner", F.userFmt, "User who owns the task") @@ -42,14 +42,14 @@ class TaskModel @Inject()(caseModel: CaseModel) override def updateHook(task: BaseEntity, updateAttrs: JsObject): Future[JsObject] = Future.successful { (updateAttrs \ "status").asOpt[TaskStatus.Type] match { - case Some(TaskStatus.InProgress) ⇒ + case Some(TaskStatus.InProgress) => updateAttrs .setIfAbsent("startDate", new Date) - case Some(TaskStatus.Completed) ⇒ + case Some(TaskStatus.Completed) => updateAttrs .setIfAbsent("endDate", new Date) + - ("flag" → JsFalse) - case _ ⇒ updateAttrs + ("flag" -> JsFalse) + case _ => updateAttrs } } } diff --git a/thehive-backend/app/models/User.scala b/thehive-backend/app/models/User.scala index 8c88d3465c..7816989d74 100644 --- a/thehive-backend/app/models/User.scala +++ b/thehive-backend/app/models/User.scala @@ -13,7 +13,7 @@ object UserStatus extends Enumeration with HiveEnumeration { val Ok, Locked = Value } -trait UserAttributes { _: AttributeDef ⇒ +trait UserAttributes { _: AttributeDef => val login = attribute("login", F.userFmt, "Login of the user", O.form) val userId = attribute("_id", F.stringFmt, "User id (login)", O.model) val key = optionalAttribute("key", F.stringFmt, "API key", O.sensitive, O.unaudited) @@ -27,10 +27,10 @@ trait UserAttributes { _: AttributeDef ⇒ class UserModel extends ModelDef[UserModel, User]("user", "User", "/user") with UserAttributes with AuditedModel { - override def removeAttribute: JsObject = Json.obj("status" → UserStatus.Locked) + override def removeAttribute: JsObject = Json.obj("status" -> UserStatus.Locked) - private def setUserId(attrs: JsObject) = (attrs \ "login").asOpt[JsString].fold(attrs) { login ⇒ - attrs - "login" + ("_id" → login) + private def setUserId(attrs: JsObject) = (attrs \ "login").asOpt[JsString].fold(attrs) { login => + attrs - "login" + ("_id" -> login) } override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful(setUserId(attrs)) @@ -42,6 +42,6 @@ class User(model: UserModel, attributes: JsObject) extends EntityDef[UserModel, override def toJson: JsObject = super.toJson + - ("roles" → JsArray(roles().map(r ⇒ JsString(r.name.toLowerCase())))) + - ("hasKey" → JsBoolean(key().isDefined)) + ("roles" -> JsArray(roles().map(r => JsString(r.name.toLowerCase())))) + + ("hasKey" -> JsBoolean(key().isDefined)) } diff --git 
a/thehive-backend/app/models/package.scala b/thehive-backend/app/models/package.scala index 92fd3669a9..62ee5ca8eb 100644 --- a/thehive-backend/app/models/package.scala +++ b/thehive-backend/app/models/package.scala @@ -1,3 +1,3 @@ package object models { - val modelVersion = 16 + val modelVersion = 17 } diff --git a/thehive-backend/app/services/AlertSrv.scala b/thehive-backend/app/services/AlertSrv.scala index 8f23953d6f..e7325deebd 100644 --- a/thehive-backend/app/services/AlertSrv.scala +++ b/thehive-backend/app/services/AlertSrv.scala @@ -59,28 +59,28 @@ class AlertSrv @Inject()( val artifactsFields = Future.traverse(fields.getValues("artifacts")) { - case a: JsObject if (a \ "dataType").asOpt[String].contains("file") ⇒ + case a: JsObject if (a \ "dataType").asOpt[String].contains("file") => (a \ "data").asOpt[String] match { - case Some(dataExtractor(filename, contentType, data)) ⇒ + case Some(dataExtractor(filename, contentType, data)) => attachmentSrv .save(filename, contentType, java.util.Base64.getDecoder.decode(data)) - .map(attachment ⇒ a - "data" + ("attachment" → Json.toJson(attachment))) - case _ ⇒ Future.successful(a) + .map(attachment => a - "data" + ("attachment" -> Json.toJson(attachment))) + case _ => Future.successful(a) } - case a ⇒ Future.successful(a) + case a => Future.successful(a) } - artifactsFields.flatMap { af ⇒ - val validArtifacts = af.filter { a ⇒ + artifactsFields.flatMap { af => + val validArtifacts = af.filter { a => val hasAttachment = (a \ "attachment").asOpt[JsObject].isDefined val hasData = (a \ "data").asOpt[String].isDefined val dataType = (a \ "dataType").asOpt[String] val isValid = dataType match { - case None ⇒ false - case Some("file") ⇒ hasAttachment && !hasData - case _ ⇒ !hasAttachment && hasData + case None => false + case Some("file") => hasAttachment && !hasData + case _ => !hasAttachment && hasData } if (!isValid) { - val dataTypeStr = dataType.fold("DataType is not set!")(d ⇒ s"DataType is $d") + val dataTypeStr = dataType.fold("DataType is not set!")(d => s"DataType is $d") val dataStr = if (hasData) "data is set" else "data is not set" val attachmentStr = if (hasAttachment) "attachment is set" else "attachment is not set" logger.warn( @@ -90,11 +90,11 @@ class AlertSrv @Inject()( isValid } /* remove duplicate artifacts */ - val distinctArtifacts = Collection.distinctBy(validArtifacts) { a ⇒ + val distinctArtifacts = Collection.distinctBy(validArtifacts) { a => val data = (a \ "data").asOpt[String] val attachment = (a \ "attachment" \ "id").asOpt[String] val dataType = (a \ "dataType").asOpt[String] - data.orElse(attachment).map(_ → dataType).getOrElse(a) + data.orElse(attachment).map(_ -> dataType).getOrElse(a) } createSrv[AlertModel, Alert](alertModel, fields.set("artifacts", JsArray(distinctArtifacts))) } @@ -118,8 +118,8 @@ class AlertSrv @Inject()( def update(id: String, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Alert] = for { - alert ← get(id) - updatedAlert ← update(alert, fields, modifyConfig) + alert <- get(id) + updatedAlert <- update(alert, fields, modifyConfig) } yield updatedAlert def update(alert: Alert, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Alert] = @@ -130,14 +130,14 @@ class AlertSrv @Inject()( val newStatus = if (follow && alert.status() != AlertStatus.New) AlertStatus.Updated else alert.status() val updatedAlert = updateSrv(alert, fields.set("status", Json.toJson(newStatus)), modifyConfig) alert.caze() match { 
- case Some(caseId) if follow ⇒ + case Some(caseId) if follow => for { - caze ← caseSrv.get(caseId) - a ← updatedAlert - _ ← importArtifacts(a, caze) - _ ← caseSrv.update(caze, Fields.empty.set("status", CaseStatus.Open.toString)) + caze <- caseSrv.get(caseId) + a <- updatedAlert + _ <- importArtifacts(a, caze) + _ <- caseSrv.update(caze, Fields.empty.set("status", CaseStatus.Open.toString)) } yield a - case _ ⇒ updatedAlert + case _ => updatedAlert } } @@ -164,8 +164,8 @@ class AlertSrv @Inject()( modifyConfig: ModifyConfig = ModifyConfig.default )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Alert] = alert.caze() match { - case Some(_) ⇒ updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Imported"), modifyConfig) - case None ⇒ updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Ignored"), modifyConfig) + case Some(_) => updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Imported"), modifyConfig) + case None => updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Ignored"), modifyConfig) } def markAsUnread( @@ -173,34 +173,34 @@ class AlertSrv @Inject()( modifyConfig: ModifyConfig = ModifyConfig.default )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Alert] = alert.caze() match { - case Some(_) ⇒ updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Updated"), modifyConfig) - case None ⇒ updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "New"), modifyConfig) + case Some(_) => updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "Updated"), modifyConfig) + case None => updateSrv[AlertModel, Alert](alertModel, alert.id, Fields.empty.set("status", "New"), modifyConfig) } def getCaseTemplate(customCaseTemplate: Option[String])(implicit ec: ExecutionContext): Future[Option[CaseTemplate]] = - customCaseTemplate.fold[Future[Option[CaseTemplate]]](Future.successful(None)) { templateName ⇒ + customCaseTemplate.fold[Future[Option[CaseTemplate]]](Future.successful(None)) { templateName => caseTemplateSrv .getByName(templateName) - .map { ct ⇒ + .map { ct => Some(ct) } - .recover { case _ ⇒ None } + .recover { case _ => None } } def createCase(alert: Alert, customCaseTemplate: Option[String])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = alert.caze() match { - case Some(id) ⇒ caseSrv.get(id) - case None ⇒ + case Some(id) => caseSrv.get(id) + case None => connectors.get(alert.tpe()) match { - case Some(connector: AlertTransformer) ⇒ + case Some(connector: AlertTransformer) => for { - caze ← connector.createCase(alert, customCaseTemplate) - _ ← setCase(alert, caze) + caze <- connector.createCase(alert, customCaseTemplate) + _ <- setCase(alert, caze) } yield caze - case _ ⇒ + case _ => for { - caseTemplate ← getCaseTemplate(customCaseTemplate) - caze ← caseSrv.create( + caseTemplate <- getCaseTemplate(customCaseTemplate) + caze <- caseSrv.create( Fields .empty .set("title", alert.title()) @@ -213,52 +213,52 @@ class AlertSrv @Inject()( .set("customFields", alert.customFields()), caseTemplate ) - _ ← importArtifacts(alert, caze) - _ ← setCase(alert, caze) + _ <- importArtifacts(alert, caze) + _ <- setCase(alert, caze) } yield caze } } override def mergeWithCase(alert: Alert, caze: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = alert.caze() match { - case Some(id) ⇒ caseSrv.get(id) - case None ⇒ + case Some(id) => 
caseSrv.get(id) + case None => connectors.get(alert.tpe()) match { - case Some(connector: AlertTransformer) ⇒ + case Some(connector: AlertTransformer) => for { - updatedCase ← connector.mergeWithCase(alert, caze) - _ ← setCase(alert, updatedCase) + updatedCase <- connector.mergeWithCase(alert, caze) + _ <- setCase(alert, updatedCase) } yield updatedCase - case _ ⇒ + case _ => for { - _ ← importArtifacts(alert, caze) + _ <- importArtifacts(alert, caze) newDescription = caze .description() + s"\n \n#### Merged with alert #${alert.sourceRef()} ${alert.title()}\n\n${alert.description().trim}" newTags = (caze.tags() ++ alert.tags()).distinct.map(JsString.apply) - updatedCase ← caseSrv.update( + updatedCase <- caseSrv.update( caze, Fields .empty .set("description", newDescription) .set("tags", JsArray(newTags)) ) - _ ← setCase(alert, caze) + _ <- setCase(alert, caze) } yield updatedCase } } def bulkMergeWithCase(alerts: Seq[Alert], caze: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = Future - .traverse(alerts) { alert ⇒ + .traverse(alerts) { alert => for { - _ ← importArtifacts(alert, caze) - _ ← setCase(alert, caze) + _ <- importArtifacts(alert, caze) + _ <- setCase(alert, caze) } yield () } - .flatMap { _ ⇒ // then merge all tags + .flatMap { _ => // then merge all tags val newTags = (caze.tags() ++ alerts.flatMap(_.tags())).distinct.map(JsString.apply) val newDescription = caze.description() + alerts - .map(alert ⇒ s"\n \n#### Merged with alert #${alert.sourceRef()} ${alert.title()}\n\n${alert.description().trim}") + .map(alert => s"\n \n#### Merged with alert #${alert.sourceRef()} ${alert.title()}\n\n${alert.description().trim}") .mkString("") caseSrv.update( caze, @@ -272,41 +272,41 @@ class AlertSrv @Inject()( def importArtifacts(alert: Alert, caze: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = { val artifactsFields = alert .artifacts() - .flatMap { artifact ⇒ + .flatMap { artifact => val tags = (artifact \ "tags").asOpt[Seq[JsString]].getOrElse(Nil) :+ JsString("src:" + alert.tpe()) val message = (artifact \ "message").asOpt[JsString].getOrElse(JsString("")) (artifact \ "dataType") .asOpt[String] .flatMap { - case "file" if !artifact.value.contains("attachment") ⇒ + case "file" if !artifact.value.contains("attachment") => (artifact \ "data").asOpt[String].collect { - case dataExtractor(filename, contentType, data) ⇒ + case dataExtractor(filename, contentType, data) => val f = Files.createTempFile("alert-", "-attachment") Files.write(f, java.util.Base64.getDecoder.decode(data)) Fields( artifact + - ("tags" → JsArray(tags)) + - ("message" → message) + ("tags" -> JsArray(tags)) + + ("message" -> message) ).set("attachment", FileInputValue(filename, f, contentType)) .unset("data") } - case "file" ⇒ + case "file" => Some( Fields( artifact + - ("tags" → JsArray(tags)) + - ("message" → message) + ("tags" -> JsArray(tags)) + + ("message" -> message) ) ) - case _ if artifact.value.contains("data") ⇒ + case _ if artifact.value.contains("data") => Some( Fields( artifact + - ("tags" → JsArray(tags)) + - ("message" → message) + ("tags" -> JsArray(tags)) + + ("message" -> message) ) ) - case _ ⇒ None + case _ => None } .orElse { logger.warn(s"Invalid artifact format: $artifact") @@ -316,44 +316,44 @@ class AlertSrv @Inject()( val updatedCase = artifactSrv .create(caze, artifactsFields) - .flatMap { artifacts ⇒ + .flatMap { artifacts => Future.traverse(artifacts) { - case Success(_) ⇒ Future.successful(()) - case Failure(ConflictError(_, 
attributes)) ⇒ // if it already exists, add tags from alert + case Success(_) => Future.successful(()) + case Failure(ConflictError(_, attributes)) => // if it already exists, add tags from alert import org.elastic4play.services.QueryDSL._ (for { - dataType ← (attributes \ "dataType").asOpt[String] + dataType <- (attributes \ "dataType").asOpt[String] data = (attributes \ "data").asOpt[String] attachment = (attributes \ "attachment").asOpt[Attachment] - tags ← (attributes \ "tags").asOpt[Seq[String]] - _ ← data orElse attachment + tags <- (attributes \ "tags").asOpt[Seq[String]] + _ <- data orElse attachment dataOrAttachment = data.toLeft(attachment.get) } yield artifactSrv .find(artifactSrv.similarArtifactFilter(dataType, dataOrAttachment, withParent(caze)), None, Nil) ._1 - .mapAsyncUnordered(1) { artifact ⇒ + .mapAsyncUnordered(1) { artifact => artifactSrv.update(artifact.id, Fields.empty.set("tags", JsArray((artifact.tags() ++ tags).distinct.map(JsString.apply)))) } - .map(_ ⇒ caze) + .map(_ => caze) .runWith(Sink.ignore) - .map(_ ⇒ caze)) + .map(_ => caze)) .getOrElse { logger.warn(s"A conflict error occurs when creating the artifact $attributes but it doesn't exist") Future.successful(()) } - case Failure(e) ⇒ + case Failure(e) => logger.warn("Create artifact error", e) Future.successful(()) } } - .map(_ ⇒ caze) - updatedCase.onComplete { _ ⇒ + .map(_ => caze) + updatedCase.onComplete { _ => // remove temporary files artifactsFields .flatMap(_.get("Attachment")) .foreach { - case FileInputValue(_, file, _) ⇒ Files.delete(file) - case _ ⇒ + case FileInputValue(_, file, _) => Files.delete(file) + case _ => } } updatedCase @@ -363,25 +363,25 @@ class AlertSrv @Inject()( implicit authContext: AuthContext, ec: ExecutionContext ): Future[Alert] = - updateSrv(alert, Fields(Json.obj("case" → caze.id, "status" → AlertStatus.Imported)), modifyConfig) + updateSrv(alert, Fields(Json.obj("case" -> caze.id, "status" -> AlertStatus.Imported)), modifyConfig) def unsetCase( alert: Alert, modifyConfig: ModifyConfig = ModifyConfig.default )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Alert] = { val status = alert.status match { - case AlertStatus.New ⇒ AlertStatus.New - case AlertStatus.Updated ⇒ AlertStatus.New - case AlertStatus.Ignored ⇒ AlertStatus.Ignored - case AlertStatus.Imported ⇒ AlertStatus.Ignored + case AlertStatus.New => AlertStatus.New + case AlertStatus.Updated => AlertStatus.New + case AlertStatus.Ignored => AlertStatus.Ignored + case AlertStatus.Imported => AlertStatus.Ignored } logger.debug(s"Remove case association in alert ${alert.id} (${alert.title}") - updateSrv(alert, Fields(Json.obj("case" → JsNull, "status" → status)), modifyConfig) + updateSrv(alert, Fields(Json.obj("case" -> JsNull, "status" -> status)), modifyConfig) } def delete(id: String, force: Boolean)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] = if (force) deleteSrv.realDelete[AlertModel, Alert](alertModel, id) - else get(id).flatMap(alert ⇒ markAsUnread(alert)).map(_ ⇒ ()) + else get(id).flatMap(alert => markAsUnread(alert)).map(_ => ()) def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Alert, NotUsed], Future[Long]) = findSrv[AlertModel, Alert](alertModel, queryDef, range, sortBy) @@ -392,57 +392,57 @@ class AlertSrv @Inject()( implicit authContext: AuthContext, ec: ExecutionContext ): Future[Alert] = - updateSrv[AlertModel, Alert](alertModel, alertId, Fields(Json.obj("follow" → follow)), modifyConfig) + 
updateSrv[AlertModel, Alert](alertModel, alertId, Fields(Json.obj("follow" -> follow)), modifyConfig) def similarCases(alert: Alert)(implicit ec: ExecutionContext): Future[Seq[CaseSimilarity]] = { def similarArtifacts(artifact: JsObject): Option[Source[Artifact, NotUsed]] = for { - dataType ← (artifact \ "dataType").asOpt[String] - data ← if (dataType == "file") + dataType <- (artifact \ "dataType").asOpt[String] + data <- if (dataType == "file") (artifact \ "attachment").asOpt[Attachment].map(Right.apply) else (artifact \ "data").asOpt[String].map(Left.apply) } yield artifactSrv.findSimilar(dataType, data, None, Some("all"), Nil)._1 Source(alert.artifacts().to[immutable.Iterable]) - .flatMapConcat { artifact ⇒ + .flatMapConcat { artifact => similarArtifacts(artifact) .getOrElse(Source.empty) } - .fold(Map.empty[String, (Int, Int)]) { (similarCases, artifact) ⇒ + .fold(Map.empty[String, (Int, Int)]) { (similarCases, artifact) => val caseId = artifact.parentId.getOrElse(sys.error(s"Artifact ${artifact.id} has no case !")) val (iocCount, artifactCount) = similarCases.getOrElse(caseId, (0, 0)) if (artifact.ioc()) - similarCases + (caseId → ((iocCount + 1, artifactCount))) + similarCases + (caseId -> ((iocCount + 1, artifactCount))) else - similarCases + (caseId → ((iocCount, artifactCount + 1))) + similarCases + (caseId -> ((iocCount, artifactCount + 1))) } .mapConcat(identity) .mapAsyncUnordered(5) { - case (caseId, (similarIOCCount, similarArtifactCount)) ⇒ + case (caseId, (similarIOCCount, similarArtifactCount)) => caseSrv.get(caseId).map((_, similarIOCCount, similarArtifactCount)) } .filter { - case (caze, _, _) ⇒ caze.status() != CaseStatus.Deleted && !caze.resolutionStatus().contains(CaseResolutionStatus.Duplicated) + case (caze, _, _) => caze.status() != CaseStatus.Deleted && !caze.resolutionStatus().contains(CaseResolutionStatus.Duplicated) } .mapAsyncUnordered(5) { - case (caze, similarIOCCount, similarArtifactCount) ⇒ + case (caze, similarIOCCount, similarArtifactCount) => for { - artifactCountJs ← artifactSrv.stats(parent("case", withId(caze.id)), Seq(groupByField("ioc", selectCount))) + artifactCountJs <- artifactSrv.stats(parent("case", withId(caze.id)), Seq(groupByField("ioc", selectCount))) iocCount = (artifactCountJs \ "1" \ "count").asOpt[Int].getOrElse(0) artifactCount = (artifactCountJs \\ "count").map(_.as[Int]).sum } yield CaseSimilarity(caze, similarIOCCount, iocCount, similarArtifactCount, artifactCount) - case _ ⇒ Future.failed(InternalError("Case not found")) + case _ => Future.failed(InternalError("Case not found")) } .runWith(Sink.seq) } def getArtifactSeen(artifact: JsObject)(implicit ec: ExecutionContext): Future[Long] = { val maybeArtifactSeen = for { - dataType ← (artifact \ "dataType").asOpt[String] - data ← dataType match { - case "file" ⇒ (artifact \ "attachment").asOpt[Attachment].map(Right.apply) - case _ ⇒ (artifact \ "data").asOpt[String].map(Left.apply) + dataType <- (artifact \ "dataType").asOpt[String] + data <- dataType match { + case "file" => (artifact \ "attachment").asOpt[Attachment].map(Right.apply) + case _ => (artifact \ "data").asOpt[String].map(Left.apply) } numberOfSimilarArtifacts = artifactSrv.findSimilar(dataType, data, None, None, Nil)._2 } yield numberOfSimilarArtifacts @@ -450,8 +450,8 @@ class AlertSrv @Inject()( } def alertArtifactsWithSeen(alert: Alert)(implicit ec: ExecutionContext): Future[Seq[JsObject]] = - Future.traverse(alert.artifacts()) { artifact ⇒ - getArtifactSeen(artifact).map(seen ⇒ artifact + ("seen" → JsNumber(seen))) 
+ Future.traverse(alert.artifacts()) { artifact => + getArtifactSeen(artifact).map(seen => artifact + ("seen" -> JsNumber(seen))) } def fixStatus()(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] = { @@ -459,30 +459,30 @@ class AlertSrv @Inject()( val updatedStatusFields = Fields.empty.set("status", "Updated") val (updateAlerts, updateAlertCount) = find("status" ~= "Update", Some("all"), Nil) - updateAlertCount.foreach(c ⇒ logger.info(s"Updating $c alert with Update status")) + updateAlertCount.foreach(c => logger.info(s"Updating $c alert with Update status")) val updateAlertProcess = updateAlerts - .mapAsyncUnordered(3) { alert ⇒ + .mapAsyncUnordered(3) { alert => logger.debug(s"Updating alert ${alert.id} (status: Update → Updated)") update(alert, updatedStatusFields) .andThen { - case Failure(error) ⇒ logger.warn(s"""Fail to set "Updated" status to alert ${alert.id}""", error) + case Failure(error) => logger.warn(s"""Fail to set "Updated" status to alert ${alert.id}""", error) } } val ignoredStatusFields = Fields.empty.set("status", "Ignored") val (ignoreAlerts, ignoreAlertCount) = find("status" ~= "Ignore", Some("all"), Nil) - ignoreAlertCount.foreach(c ⇒ logger.info(s"Updating $c alert with Ignore status")) + ignoreAlertCount.foreach(c => logger.info(s"Updating $c alert with Ignore status")) val ignoreAlertProcess = ignoreAlerts - .mapAsyncUnordered(3) { alert ⇒ + .mapAsyncUnordered(3) { alert => logger.debug(s"Updating alert ${alert.id} (status: Ignore → Ignored)") update(alert, ignoredStatusFields) .andThen { - case Failure(error) ⇒ logger.warn(s"""Fail to set "Ignored" status to alert ${alert.id}""", error) + case Failure(error) => logger.warn(s"""Fail to set "Ignored" status to alert ${alert.id}""", error) } } (updateAlertProcess ++ ignoreAlertProcess) .runWith(Sink.ignore) - .map(_ ⇒ ()) + .map(_ => ()) } } diff --git a/thehive-backend/app/services/ArtifactSrv.scala b/thehive-backend/app/services/ArtifactSrv.scala index 2f9ccff25e..a6f41997e4 100644 --- a/thehive-backend/app/services/ArtifactSrv.scala +++ b/thehive-backend/app/services/ArtifactSrv.scala @@ -42,25 +42,26 @@ class ArtifactSrv @Inject()( def create(caseId: String, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Artifact] = getSrv[CaseModel, Case](caseModel, caseId) - .flatMap { caze ⇒ + .flatMap { caze => create(caze, fields) } def create(caze: Case, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Artifact] = createSrv[ArtifactModel, Artifact, Case](artifactModel, caze, fields) .recoverWith { - case _: ConflictError ⇒ updateIfDeleted(caze, fields) // if the artifact already exists, search it and update it + case _: ConflictError => updateIfDeleted(caze, fields) // if the artifact already exists, search it and update it } private def updateIfDeleted(caze: Case, fields: Fields, modifyConfig: ModifyConfig = ModifyConfig.default)( - implicit authContext: AuthContext, ec: ExecutionContext + implicit authContext: AuthContext, + ec: ExecutionContext ): Future[Artifact] = - fieldsSrv.parse(fields, artifactModel).toFuture.flatMap { attrs ⇒ + fieldsSrv.parse(fields, artifactModel).toFuture.flatMap { attrs => val updatedArtifact = for { - id ← artifactModel.computeId(caze, attrs) - artifact ← getSrv[ArtifactModel, Artifact](artifactModel, id) + id <- artifactModel.computeId(caze, attrs) + artifact <- getSrv[ArtifactModel, Artifact](artifactModel, id) if artifact.status() == ArtifactStatus.Deleted - updatedArtifact ← updateSrv[ArtifactModel, 
Artifact]( + updatedArtifact <- updateSrv[ArtifactModel, Artifact]( artifactModel, artifact.id, fields @@ -71,36 +72,40 @@ class ArtifactSrv @Inject()( ) } yield updatedArtifact updatedArtifact.recoverWith { - case _ ⇒ Future.failed(ConflictError("Artifact already exists", attrs)) + case _ => Future.failed(ConflictError("Artifact already exists", attrs)) } } def create(caseId: String, fieldSet: Seq[Fields])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Seq[Try[Artifact]]] = getSrv[CaseModel, Case](caseModel, caseId) - .flatMap { caze ⇒ + .flatMap { caze => create(caze, fieldSet) } def create(caze: Case, fieldSet: Seq[Fields])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Seq[Try[Artifact]]] = - createSrv[ArtifactModel, Artifact, Case](artifactModel, fieldSet.map(caze → _)) + createSrv[ArtifactModel, Artifact, Case](artifactModel, fieldSet.map(caze -> _)) .flatMap { // if there is failure - case t if t.exists(_.isFailure) ⇒ + case t if t.exists(_.isFailure) => Future.traverse(t.zip(fieldSet)) { - case (Failure(ConflictError(_, _)), fields) ⇒ updateIfDeleted(caze, fields).toTry - case (r, _) ⇒ Future.successful(r) + case (Failure(ConflictError(_, _)), fields) => updateIfDeleted(caze, fields).toTry + case (r, _) => Future.successful(r) } - case t ⇒ Future.successful(t) + case t => Future.successful(t) } def get(id: String)(implicit ec: ExecutionContext): Future[Artifact] = getSrv[ArtifactModel, Artifact](artifactModel, id) - def update(id: String, fields: Fields, modifyConfig: ModifyConfig = ModifyConfig.default)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Artifact] = + def update(id: String, fields: Fields, modifyConfig: ModifyConfig = ModifyConfig.default)( + implicit authContext: AuthContext, + ec: ExecutionContext + ): Future[Artifact] = updateSrv[ArtifactModel, Artifact](artifactModel, id, fields, modifyConfig) def bulkUpdate(ids: Seq[String], fields: Fields, modifyConfig: ModifyConfig = ModifyConfig.default)( - implicit authContext: AuthContext, ec: ExecutionContext + implicit authContext: AuthContext, + ec: ExecutionContext ): Future[Seq[Try[Artifact]]] = updateSrv.apply[ArtifactModel, Artifact](artifactModel, ids, fields, modifyConfig) @@ -109,13 +114,13 @@ class ArtifactSrv @Inject()( def realDelete(artifact: Artifact)(implicit ec: ExecutionContext): Future[Unit] = for { - _ ← auditSrv + _ <- auditSrv .findFor(artifact, Some("all"), Nil) ._1 .mapAsync(1)(auditSrv.realDelete) .runWith(Sink.ignore) _ = eventSrv.publish(RemoveJobsOf(artifact.id)) - _ ← dbRemove(artifact) + _ <- dbRemove(artifact) } yield () def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Artifact, NotUsed], Future[Long]) = @@ -125,12 +130,14 @@ class ArtifactSrv @Inject()( def isSeen(artifact: Artifact)(implicit ec: ExecutionContext): Future[Long] = { import org.elastic4play.services.QueryDSL._ - findSrv(artifactModel, similarArtifactFilter(artifact), selectCount).map { stats ⇒ + findSrv(artifactModel, similarArtifactFilter(artifact), selectCount).map { stats => (stats \ "count").asOpt[Long].getOrElse(1L) } } - def findSimilar(artifact: Artifact, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Artifact, NotUsed], Future[Long]) = + def findSimilar(artifact: Artifact, range: Option[String], sortBy: Seq[String])( + implicit ec: ExecutionContext + ): (Source[Artifact, NotUsed], Future[Long]) = find(similarArtifactFilter(artifact), range, sortBy) def findSimilar( @@ 
-145,9 +152,9 @@ class ArtifactSrv @Inject()( private[services] def similarArtifactFilter(artifact: Artifact): QueryDef = { import org.elastic4play.services.QueryDSL._ val data = (artifact.data(), artifact.attachment()) match { - case (Some(_data), None) ⇒ Left(_data) - case (None, Some(attachment)) ⇒ Right(attachment) - case _ ⇒ sys.error("") + case (Some(_data), None) => Left(_data) + case (None, Some(attachment)) => Right(attachment) + case _ => sys.error("") } val filter = parent("case", not(withId(artifact.parentId.get))) similarArtifactFilter(artifact.dataType(), data, filter) @@ -157,7 +164,7 @@ class ArtifactSrv @Inject()( import org.elastic4play.services.QueryDSL._ data match { // artifact is an hash - case Left(d) if dataType == "hash" ⇒ + case Left(d) if dataType == "hash" => and( filter, parent("case", and("status" ~!= CaseStatus.Deleted, "resolutionStatus" ~!= CaseResolutionStatus.Duplicated)), @@ -165,7 +172,7 @@ class ArtifactSrv @Inject()( or(and("data" ~= d, "dataType" ~= dataType), "attachment.hashes" ~= d) ) // artifact contains data but not an hash - case Left(d) ⇒ + case Left(d) => and( filter, parent("case", and("status" ~!= CaseStatus.Deleted, "resolutionStatus" ~!= CaseResolutionStatus.Duplicated)), @@ -174,9 +181,9 @@ class ArtifactSrv @Inject()( "dataType" ~= dataType ) // artifact is a file - case Right(attachment) ⇒ + case Right(attachment) => val hashes = attachment.hashes.map(_.toString) - val hashFilter = hashes.map { h ⇒ + val hashFilter = hashes.map { h => "attachment.hashes" ~= h } and( @@ -185,7 +192,7 @@ class ArtifactSrv @Inject()( "status" ~= "Ok", or( hashFilter :+ - and("dataType" ~= "hash", or(hashes.map { h ⇒ + and("dataType" ~= "hash", or(hashes.map { h => "data" ~= h })) ) diff --git a/thehive-backend/app/services/AuditSrv.scala b/thehive-backend/app/services/AuditSrv.scala index d4344eebba..4b52e8443e 100644 --- a/thehive-backend/app/services/AuditSrv.scala +++ b/thehive-backend/app/services/AuditSrv.scala @@ -18,19 +18,19 @@ import org.elastic4play.models.{Attribute, BaseEntity, BaseModelDef} import org.elastic4play.services._ import org.elastic4play.utils.{Instance, RichJson} -trait AuditedModel { self: BaseModelDef ⇒ +trait AuditedModel { self: BaseModelDef => def attributes: Seq[Attribute[_]] lazy val auditedAttributes: Map[String, Attribute[_]] = - attributes.collect { case a if !a.isUnaudited ⇒ a.attributeName → a }.toMap + attributes.collect { case a if !a.isUnaudited => a.attributeName -> a }.toMap def selectAuditedAttributes(attrs: JsObject): JsObject = JsObject { attrs.fields.flatMap { - case (attrName, value) ⇒ + case (attrName, value) => val attrNames = attrName.split("\\.").toSeq - auditedAttributes.get(attrNames.head).map { _ ⇒ + auditedAttributes.get(attrNames.head).map { _ => val reverseNames = attrNames.reverse - reverseNames.drop(1).foldLeft(reverseNames.head → value)((jsTuple, name) ⇒ name → JsObject(Seq(jsTuple))) + reverseNames.drop(1).foldLeft(reverseNames.head -> value)((jsTuple, name) => name -> JsObject(Seq(jsTuple))) } } } @@ -42,7 +42,7 @@ class AuditSrv @Inject()( modelSrv: ModelSrv, auxSrv: AuxSrv, dBRemove: DBRemove, - findSrv: FindSrv, + findSrv: FindSrv ) { private[AuditSrv] lazy val logger = Logger(getClass) @@ -51,46 +51,48 @@ class AuditSrv @Inject()( import org.elastic4play.services.QueryDSL._ val streamableEntities = modelSrv.list.collect { - case m: AuditedModel if m.modelName != "user" ⇒ m.modelName + case m: AuditedModel if m.modelName != "user" => m.modelName } val filter = rootId match { - case Some(rid) 
⇒ and("rootId" ~= rid, "base" ~= true, "objectType" in (streamableEntities: _*)) - case None ⇒ and("base" ~= true, "objectType" in (streamableEntities: _*)) + case Some(rid) => and("rootId" ~= rid, "base" ~= true, "objectType" in (streamableEntities: _*)) + case None => and("base" ~= true, "objectType" in (streamableEntities: _*)) } val (src, total) = findSrv[AuditModel, Audit](auditModel, filter, Some(s"0-$count"), Seq("-startDate")) - val entities = src.mapAsync(5) { audit ⇒ + val entities = src.mapAsync(5) { audit => val fSummary = findSrv( auditModel, and("requestId" ~= audit.requestId(), "objectType" in (streamableEntities: _*)), groupByField("objectType", groupByField("operation", selectCount)) - ).map { json ⇒ + ).map { json => json.collectValues { - case objectType: JsObject ⇒ + case objectType: JsObject => objectType.collectValues { - case operation: JsObject ⇒ (operation \ "count").as[JsValue] + case operation: JsObject => (operation \ "count").as[JsValue] } } } val fObj = auxSrv.apply(audit.objectType(), audit.objectId(), 10, withStats = false, removeUnaudited = true) for { - summary ← fSummary - obj ← fObj - } yield JsObject(Seq("base" → (audit.toJson + ("object" → obj)), "summary" → summary)) + summary <- fSummary + obj <- fObj + } yield JsObject(Seq("base" -> (audit.toJson + ("object" -> obj)), "summary" -> summary)) } (entities, total) } def realDelete(audit: Audit)(implicit ec: ExecutionContext): Future[Unit] = - dBRemove(audit).map(_ ⇒ ()) + dBRemove(audit).map(_ => ()) def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Audit, NotUsed], Future[Long]) = findSrv[AuditModel, Audit](auditModel, queryDef, range, sortBy) def stats(queryDef: QueryDef, aggs: Seq[Agg])(implicit ec: ExecutionContext): Future[JsObject] = findSrv(auditModel, queryDef, aggs: _*) - def findFor(entity: BaseEntity, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Audit, NotUsed], Future[Long]) = { + def findFor(entity: BaseEntity, range: Option[String], sortBy: Seq[String])( + implicit ec: ExecutionContext + ): (Source[Audit, NotUsed], Future[Long]) = { import org.elastic4play.services.QueryDSL._ findSrv[AuditModel, Audit](auditModel, and("objectId" ~= entity.id, "objectType" ~= entity.model.modelName), range, sortBy) } @@ -116,24 +118,24 @@ class AuditActor @Inject()(auditModel: AuditModel, createSrv: CreateSrv, eventSr } override def receive: Receive = { - case RequestProcessEnd(request, _) ⇒ + case RequestProcessEnd(request, _) => currentRequestIds = currentRequestIds - Instance.getRequestId(request) - case AuditOperation(EntityExtractor(model: AuditedModel, id, routing), action, details, authContext, date) ⇒ + case AuditOperation(EntityExtractor(model: AuditedModel, id, routing), action, details, authContext, date) => val requestId = authContext.requestId val audit = Json.obj( - "operation" → action, - "details" → model.selectAuditedAttributes(details), - "objectType" → model.modelName, - "objectId" → id, - "base" → !currentRequestIds.contains(requestId), - "startDate" → date, - "rootId" → routing, - "requestId" → requestId + "operation" -> action, + "details" -> model.selectAuditedAttributes(details), + "objectType" -> model.modelName, + "objectId" -> id, + "base" -> !currentRequestIds.contains(requestId), + "startDate" -> date, + "rootId" -> routing, + "requestId" -> requestId ) createSrv[AuditModel, Audit](auditModel, Fields(audit))(authContext, context.dispatcher) .failed - .foreach(t ⇒ 
logger.error("Audit error", t))(context.dispatcher) + .foreach(t => logger.error("Audit error", t))(context.dispatcher) currentRequestIds = currentRequestIds + requestId webHooks.send(audit) diff --git a/thehive-backend/app/services/CaseMergeSrv.scala b/thehive-backend/app/services/CaseMergeSrv.scala index 26a9daac40..69b18f4785 100644 --- a/thehive-backend/app/services/CaseMergeSrv.scala +++ b/thehive-backend/app/services/CaseMergeSrv.scala @@ -36,16 +36,16 @@ class CaseMergeSrv @Inject()( import org.elastic4play.services.QueryDSL._ - private[services] def concatOpt[E](entities: Seq[E], sep: String, getId: E ⇒ Long, getStr: E ⇒ Option[String]) = - JsString(entities.flatMap(e ⇒ getStr(e).map(s ⇒ s"#${getId(e)}:$s")).mkString(sep)) + private[services] def concatOpt[E](entities: Seq[E], sep: String, getId: E => Long, getStr: E => Option[String]) = + JsString(entities.flatMap(e => getStr(e).map(s => s"#${getId(e)}:$s")).mkString(sep)) - private[services] def concat[E](entities: Seq[E], sep: String, getId: E ⇒ Long, getStr: E ⇒ String) = - JsString(entities.map(e ⇒ s"#${getId(e)}:${getStr(e)}").mkString(sep)) + private[services] def concat[E](entities: Seq[E], sep: String, getId: E => Long, getStr: E => String) = + JsString(entities.map(e => s"#${getId(e)}:${getStr(e)}").mkString(sep)) private[services] def concatCaseDescription(cases: Seq[Case]) = { val str = cases .filterNot(_.description().trim.isEmpty) - .map { caze ⇒ + .map { caze => s"#### ${caze.title()} ([#${caze.caseId()}](#/case/${caze.id}/details))\n\n${caze.description()}" } .mkString("\n \n") @@ -58,39 +58,39 @@ class CaseMergeSrv @Inject()( val resolutionStatus = cases .map(_.resolutionStatus()) .reduce[Option[CaseResolutionStatus.Type]] { - case (None, s) ⇒ s - case (s, None) ⇒ s - case (Some(CaseResolutionStatus.Other), s) ⇒ s - case (s, Some(CaseResolutionStatus.Other)) ⇒ s - case (Some(CaseResolutionStatus.FalsePositive), s) ⇒ s - case (s, Some(CaseResolutionStatus.FalsePositive)) ⇒ s - case (Some(CaseResolutionStatus.Indeterminate), s) ⇒ s - case (s, Some(CaseResolutionStatus.Indeterminate)) ⇒ s - case (s, _) ⇒ s //TruePositive + case (None, s) => s + case (s, None) => s + case (Some(CaseResolutionStatus.Other), s) => s + case (s, Some(CaseResolutionStatus.Other)) => s + case (Some(CaseResolutionStatus.FalsePositive), s) => s + case (s, Some(CaseResolutionStatus.FalsePositive)) => s + case (Some(CaseResolutionStatus.Indeterminate), s) => s + case (s, Some(CaseResolutionStatus.Indeterminate)) => s + case (s, _) => s //TruePositive } - resolutionStatus.map(s ⇒ JsString(s.toString)) + resolutionStatus.map(s => JsString(s.toString)) } private[services] def mergeImpactStatus(cases: Seq[Case]) = { val impactStatus = cases .map(_.impactStatus()) .reduce[Option[CaseImpactStatus.Type]] { - case (None, s) ⇒ s - case (s, None) ⇒ s - case (Some(CaseImpactStatus.NotApplicable), s) ⇒ s - case (s, Some(CaseImpactStatus.NotApplicable)) ⇒ s - case (Some(CaseImpactStatus.NoImpact), s) ⇒ s - case (s, Some(CaseImpactStatus.NoImpact)) ⇒ s - case (s, _) ⇒ s // WithImpact + case (None, s) => s + case (s, None) => s + case (Some(CaseImpactStatus.NotApplicable), s) => s + case (s, Some(CaseImpactStatus.NotApplicable)) => s + case (Some(CaseImpactStatus.NoImpact), s) => s + case (s, Some(CaseImpactStatus.NoImpact)) => s + case (s, _) => s // WithImpact } - impactStatus.map(s ⇒ JsString(s.toString)) + impactStatus.map(s => JsString(s.toString)) } private[services] def mergeSummary(cases: Seq[Case]) = { val summary = cases - .flatMap(c ⇒ 
c.summary().map(_ → c.caseId())) + .flatMap(c => c.summary().map(_ -> c.caseId())) .map { - case (_summary, caseId) ⇒ s"#$caseId:${_summary}" + case (_summary, caseId) => s"#$caseId:${_summary}" } if (summary.isEmpty) None @@ -100,16 +100,16 @@ class CaseMergeSrv @Inject()( private[services] def mergeMetrics(cases: Seq[Case]): JsObject = { val metrics = for { - caze ← cases - metricsObject ← caze.metrics().asOpt[JsObject] + caze <- cases + metricsObject <- caze.metrics().asOpt[JsObject] } yield metricsObject - val mergedMetrics: Seq[(String, JsValue)] = metrics.flatMap(_.keys).distinct.map { key ⇒ - val metricValues = metrics.flatMap(m ⇒ (m \ key).asOpt[BigDecimal]) + val mergedMetrics: Seq[(String, JsValue)] = metrics.flatMap(_.keys).distinct.map { key => + val metricValues = metrics.flatMap(m => (m \ key).asOpt[BigDecimal]) if (metricValues.lengthCompare(1) != 0) - key → JsNull + key -> JsNull else - key → JsNumber(metricValues.head) + key -> JsNumber(metricValues.head) } JsObject(mergedMetrics) @@ -117,29 +117,31 @@ class CaseMergeSrv @Inject()( private[services] def mergeCustomFields(cases: Seq[Case]): JsObject = { val customFields = for { - caze ← cases - customFieldsObject ← caze.customFields().asOpt[JsObject] + caze <- cases + customFieldsObject <- caze.customFields().asOpt[JsObject] } yield customFieldsObject - val mergedCustomFieldsObject: Seq[(String, JsValue)] = customFields.flatMap(_.keys).distinct.flatMap { key ⇒ - val customFieldsValues = customFields.flatMap(cf ⇒ (cf \ key).asOpt[JsObject]).distinct + val mergedCustomFieldsObject: Seq[(String, JsValue)] = customFields.flatMap(_.keys).distinct.flatMap { key => + val customFieldsValues = customFields.flatMap(cf => (cf \ key).asOpt[JsObject]).distinct if (customFieldsValues.lengthCompare(1) != 0) None else - Some(key → customFieldsValues.head) + Some(key -> customFieldsValues.head) } JsObject(mergedCustomFieldsObject) } private[services] def baseFields(entity: BaseEntity): Fields = - Fields(entity.attributes - "_id" - "_routing" - "_parent" - "_type" - "_seqNo" - "_primaryTerm" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user") + Fields( + entity.attributes - "_id" - "_routing" - "_parent" - "_type" - "_seqNo" - "_primaryTerm" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user" + ) private[services] def mergeLogs(oldTask: Task, newTask: Task)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Done] = logSrv .find(withParent("case_task", oldTask.id), Some("all"), Nil) ._1 - .mapAsyncUnordered(5) { log ⇒ + .mapAsyncUnordered(5) { log => logSrv.create(newTask, baseFields(log)) } .runWith(Sink.ignore) @@ -151,47 +153,47 @@ class CaseMergeSrv @Inject()( Nil ) - futureTaskCount.foreach(count ⇒ logger.info(s"Creating $count task(s):")) + futureTaskCount.foreach(count => logger.info(s"Creating $count task(s):")) tasks - .mapAsyncUnordered(5) { task ⇒ - taskSrv.create(newCase, baseFields(task)).map(task → _) + .mapAsyncUnordered(5) { task => + taskSrv.create(newCase, baseFields(task)).map(task -> _) } .flatMapConcat { - case (oldTask, newTask) ⇒ + case (oldTask, newTask) => logger.info(s"\ttask : ${oldTask.id} → ${newTask.id} : ${newTask.title()}") val (logs, futureLogCount) = logSrv.find(and(parent("case_task", withId(oldTask.id)), "status" ~!= LogStatus.Deleted), Some("all"), Nil) - futureLogCount.foreach { count ⇒ + futureLogCount.foreach { count => logger.info(s"Creating $count log(s) in task ${newTask.id}") } - logs.map(_ → newTask) + logs.map(_ -> newTask) } .mapAsyncUnordered(5) { - case 
(log, task) ⇒ - val fields = log.attachment().fold(baseFields(log)) { a ⇒ + case (log, task) => + val fields = log.attachment().fold(baseFields(log)) { a => baseFields(log).set("attachment", AttachmentInputValue(a.name, a.hashes, a.size, a.contentType, a.id)) } logSrv.create(task, fields) } .runWith(Sink.ignore) .andThen { - case _ ⇒ + case _ => taskSrv .find(and(parent("case", withId(cases.map(_.id): _*)), "status" ~= TaskStatus.Waiting), Some("all"), Nil) ._1 .fold(Seq.empty[Task]) { - case (uniqueTasks, task) if !uniqueTasks.exists(_.title() == task.title()) ⇒ + case (uniqueTasks, task) if !uniqueTasks.exists(_.title() == task.title()) => uniqueTasks :+ task - case (uniqueTasks, _) ⇒ uniqueTasks + case (uniqueTasks, _) => uniqueTasks } .map(_.map(baseFields)) - .mapAsyncUnordered(5) { tasksFields ⇒ + .mapAsyncUnordered(5) { tasksFields => taskSrv.create(newCase, tasksFields) } .mapConcat(_.toList) .map { - case Failure(error) ⇒ logger.warn("Task creation fails", error) - case _ ⇒ + case Failure(error) => logger.warn("Task creation fails", error) + case _ => } .runWith(Sink.ignore) } @@ -201,74 +203,77 @@ class CaseMergeSrv @Inject()( val status = artifacts .map(_.status()) .reduce[ArtifactStatus.Type] { - case (ArtifactStatus.Deleted, s) ⇒ s - case (s, _) ⇒ s + case (ArtifactStatus.Deleted, s) => s + case (s, _) => s } .toString JsString(status) } - private[services] def mergeArtifactsAndJobs(newCase: Case, cases: Seq[Case])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Done] = { - val caseMap = cases.map(c ⇒ c.id → c).toMap + private[services] def mergeArtifactsAndJobs( + newCase: Case, + cases: Seq[Case] + )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Done] = { + val caseMap = cases.map(c => c.id -> c).toMap val caseFilter = and(parent("case", withId(cases.map(_.id): _*)), "status" ~= "Ok") // Find artifacts hold by cases val (artifacts, futureArtifactCount) = artifactSrv.find(caseFilter, Some("all"), Nil) - futureArtifactCount.foreach { count ⇒ + futureArtifactCount.foreach { count => logger.info(s"Found $count artifact(s) in merging cases") } artifacts - .mapAsyncUnordered(5) { artifact ⇒ + .mapAsyncUnordered(5) { artifact => // For each artifact find similar artifacts val dataFilter = artifact.data().map("data" ~= _) orElse artifact.attachment().map("attachment.id" ~= _.id) val filter = and(caseFilter, "status" ~= "Ok", "dataType" ~= artifact.dataType(), dataFilter.get) val (artifacts, futureArtifactCount) = artifactSrv.find(filter, Some("all"), Nil) - futureArtifactCount.foreach { count ⇒ + futureArtifactCount.foreach { count => logger.debug( s"$count identical artifact(s) found (${artifact.dataType()}):${(artifact.data() orElse artifact.attachment().map(_.name)).get}" ) } artifacts.runWith(Sink.seq) } - .mapAsync(5) { sameArtifacts ⇒ + .mapAsync(5) { sameArtifacts => // Same artifacts are merged val firstArtifact = sameArtifacts.head val fields = firstArtifact .attachment() - .fold(Fields.empty) { a ⇒ + .fold(Fields.empty) { a => Fields.empty.set("attachment", AttachmentInputValue(a.name, a.hashes, a.size, a.contentType, a.id)) } .set("data", firstArtifact.data().map(JsString)) .set("dataType", firstArtifact.dataType()) - .set("message", concatOpt[Artifact](sameArtifacts, "\n \n", a ⇒ caseMap(a.parentId.get).caseId(), _.message())) + .set("message", concatOpt[Artifact](sameArtifacts, "\n \n", a => caseMap(a.parentId.get).caseId(), _.message())) .set("startDate", firstDate(sameArtifacts.map(_.startDate()))) .set("tlp", 
JsNumber(sameArtifacts.map(_.tlp()).min)) .set("tags", JsArray(sameArtifacts.flatMap(_.tags()).distinct.map(JsString))) .set("ioc", JsBoolean(sameArtifacts.map(_.ioc()).reduce(_ || _))) .set("status", mergeArtifactStatus(sameArtifacts)) .set("sighted", JsBoolean(sameArtifacts.map(_.sighted()).reduce(_ || _))) - .set("reports", sameArtifacts.map(a ⇒ Json.parse(a.reports()).as[JsObject]).reduce(_ deepMerge _).toString) + .set("reports", sameArtifacts.map(a => Json.parse(a.reports()).as[JsObject]).reduce(_ deepMerge _).toString) // Merged artifact is created under new case artifactSrv .create(newCase, fields) - .map(a ⇒ List(a → sameArtifacts)) + .map(a => List(a -> sameArtifacts)) // Errors are logged and ignored (probably document already exists) .recover { - case e ⇒ + case e => logger.warn("Artifact creation fail", e) Nil } } .mapConcat(identity) .runForeach { - case (newArtifact, sameArtifacts) ⇒ + case (newArtifact, sameArtifacts) => // Then jobs are imported eventSrv.publish(MergeArtifact(newArtifact, sameArtifacts, authContext)) } } private[services] def mergeCases(cases: Seq[Case])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = { - logger.info("Merging cases: " + cases.map(c ⇒ s"#${c.caseId()}:${c.title()}").mkString(" / ")) + logger.info("Merging cases: " + cases.map(c => s"#${c.caseId()}:${c.title()}").mkString(" / ")) val fields = Fields .empty .set("title", concat[Case](cases, " / ", _.caseId(), _.title())) @@ -284,13 +289,13 @@ class CaseMergeSrv @Inject()( .set("resolutionStatus", mergeResolutionStatus(cases)) .set("impactStatus", mergeImpactStatus(cases)) .set("summary", mergeSummary(cases)) - .set("mergeFrom", JsArray(cases.map(c ⇒ JsString(c.id)))) + .set("mergeFrom", JsArray(cases.map(c => JsString(c.id)))) caseSrv.create(fields) } def markCaseAsDuplicated(cases: Seq[Case], mergeCase: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Done] = Future - .traverse(cases) { caze ⇒ + .traverse(cases) { caze => val s = s"Merge into : ${mergeCase.title()} ([#${mergeCase.caseId()}](#/case/${mergeCase.id}/details))" val summary = caze.summary().fold(s)(_ + s"\n\n$s") caseSrv.update( @@ -303,20 +308,20 @@ class CaseMergeSrv @Inject()( .set("summary", summary) ) } - .map(_ ⇒ Done) + .map(_ => Done) .recover { - case error ⇒ + case error => logger.error("Case update fail", error) Done } def merge(caseIds: String*)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = for { - cases ← Future.sequence(caseIds.map(caseSrv.get)) - newCase ← mergeCases(cases) - _ ← mergeTasksAndLogs(newCase, cases) - _ ← mergeArtifactsAndJobs(newCase, cases) - _ ← markCaseAsDuplicated(cases, newCase) - _ ← Future.traverse(cases)(caze ⇒ taskSrv.closeTasksOfCase(caze.id)) + cases <- Future.sequence(caseIds.map(caseSrv.get)) + newCase <- mergeCases(cases) + _ <- mergeTasksAndLogs(newCase, cases) + _ <- mergeArtifactsAndJobs(newCase, cases) + _ <- markCaseAsDuplicated(cases, newCase) + _ <- Future.traverse(cases)(caze => taskSrv.closeTasksOfCase(caze.id)) } yield newCase } diff --git a/thehive-backend/app/services/CaseSrv.scala b/thehive-backend/app/services/CaseSrv.scala index a2237005f1..5567b11850 100644 --- a/thehive-backend/app/services/CaseSrv.scala +++ b/thehive-backend/app/services/CaseSrv.scala @@ -39,8 +39,8 @@ class CaseSrv @Inject()( def applyTemplate(template: CaseTemplate, originalFields: Fields): Fields = { def getJsObjectOrEmpty(value: Option[JsValue]) = value.fold(JsObject.empty) { - case obj: JsObject ⇒ obj - case _ ⇒ 
JsObject.empty + case obj: JsObject => obj + case _ => JsObject.empty } val metrics = originalFields.getValue("metrics").fold(JsObject.empty)(_.as[JsObject]) deepMerge template.metrics().as[JsObject] @@ -48,7 +48,7 @@ class CaseSrv @Inject()( val customFields = getJsObjectOrEmpty(template.customFields()) ++ getJsObjectOrEmpty(originalFields.getValue("customFields")) originalFields - .set("title", originalFields.getString("title").map(t ⇒ JsString(template.titlePrefix().getOrElse("") + " " + t))) + .set("title", originalFields.getString("title").map(t => JsString(template.titlePrefix().getOrElse("") + " " + t))) .set("description", originalFields.getString("description").orElse(template.description()).map(JsString)) .set("severity", originalFields.getLong("severity").orElse(template.severity()).map(JsNumber(_))) .set("tags", JsArray(tags.map(JsString))) @@ -60,21 +60,21 @@ class CaseSrv @Inject()( def create(fields: Fields, template: Option[CaseTemplate] = None)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = { val fieldsWithOwner = fields.getString("owner") match { - case None ⇒ fields.set("owner", authContext.userId) - case Some(_) ⇒ fields + case None => fields.set("owner", authContext.userId) + case Some(_) => fields } val templatedCaseFields = template match { - case None ⇒ fieldsWithOwner - case Some(t) ⇒ applyTemplate(t, fieldsWithOwner) + case None => fieldsWithOwner + case Some(t) => applyTemplate(t, fieldsWithOwner) } createSrv[CaseModel, Case](caseModel, templatedCaseFields.unset("tasks")) - .flatMap { caze ⇒ + .flatMap { caze => val taskFields = fields.getValues("tasks").collect { - case task: JsObject ⇒ Fields(task) + case task: JsObject => Fields(task) } ++ template.map(_.tasks().map(Fields(_))).getOrElse(Nil) taskSrv .create(caze, taskFields) - .map(_ ⇒ caze) + .map(_ => caze) } } @@ -94,7 +94,8 @@ class CaseSrv @Inject()( updateSrv(caze, fields, modifyConfig) def bulkUpdate(ids: Seq[String], fields: Fields, modifyConfig: ModifyConfig = ModifyConfig.default)( - implicit authContext: AuthContext, ec: ExecutionContext + implicit authContext: AuthContext, + ec: ExecutionContext ): Future[Seq[Try[Case]]] = updateSrv[CaseModel, Case](caseModel, ids, fields, modifyConfig) @@ -107,17 +108,17 @@ class CaseSrv @Inject()( def realDelete(caze: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] = { import org.elastic4play.services.QueryDSL._ for { - _ ← taskSrv + _ <- taskSrv .find(withParent(caze), Some("all"), Nil) ._1 .mapAsync(1)(taskSrv.realDelete) .runWith(Sink.ignore) - _ ← artifactSrv + _ <- artifactSrv .find(withParent(caze), Some("all"), Nil) ._1 .mapAsync(1)(artifactSrv.realDelete) .runWith(Sink.ignore) - _ ← auditSrv + _ <- auditSrv .findFor(caze, Some("all"), Nil) ._1 .mapAsync(1)(auditSrv.realDelete) @@ -126,7 +127,7 @@ class CaseSrv @Inject()( .find("case" ~= caze.id, Some("all"), Nil) ._1 .mapAsync(1)(alertSrv.unsetCase(_)) - _ ← deleteSrv.realDelete(caze) + _ <- deleteSrv.realDelete(caze) } yield () } @@ -138,11 +139,11 @@ class CaseSrv @Inject()( def getStats(id: String)(implicit ec: ExecutionContext): Future[JsObject] = { import org.elastic4play.services.QueryDSL._ for { - taskStats ← taskSrv.stats( + taskStats <- taskSrv.stats( and(withParent("case", id), "status" in ("Waiting", "InProgress", "Completed")), Seq(groupByField("status", selectCount)) ) - artifactStats ← findSrv(artifactModel, and(withParent("case", id), "status" ~= "Ok"), groupByField("status", selectCount)) + artifactStats <- findSrv(artifactModel, 
and(withParent("case", id), "status" ~= "Ok"), groupByField("status", selectCount)) } yield Json.obj(("tasks", taskStats), ("artifacts", artifactStats)) } @@ -154,18 +155,18 @@ class CaseSrv @Inject()( Some("all"), Nil )._1 - .flatMapConcat { artifact ⇒ + .flatMapConcat { artifact => artifactSrv.findSimilar(artifact, Some("all"), Nil)._1 } - .fold(Map.empty[String, List[Artifact]]) { (similarCases, artifact) ⇒ + .fold(Map.empty[String, List[Artifact]]) { (similarCases, artifact) => val caseId = artifact.parentId.getOrElse(sys.error(s"Artifact ${artifact.id} has no case !")) val artifactList = artifact :: similarCases.getOrElse(caseId, Nil) - similarCases + (caseId → artifactList) + similarCases + (caseId -> artifactList) } .mapConcat(identity) .mapAsyncUnordered(5) { - case (caseId, artifacts) ⇒ getSrv[CaseModel, Case](caseModel, caseId) map (_ → artifacts) + case (caseId, artifacts) => getSrv[CaseModel, Case](caseModel, caseId) map (_ -> artifacts) } - .mapMaterializedValue(_ ⇒ NotUsed) + .mapMaterializedValue(_ => NotUsed) } } diff --git a/thehive-backend/app/services/CaseTemplateSrv.scala b/thehive-backend/app/services/CaseTemplateSrv.scala index c9876da2f0..69e17695a9 100644 --- a/thehive-backend/app/services/CaseTemplateSrv.scala +++ b/thehive-backend/app/services/CaseTemplateSrv.scala @@ -48,6 +48,8 @@ class CaseTemplateSrv @Inject()( def delete(id: String)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] = deleteSrv.realDelete[CaseTemplateModel, CaseTemplate](caseTemplateModel, id) - def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[CaseTemplate, NotUsed], Future[Long]) = + def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])( + implicit ec: ExecutionContext + ): (Source[CaseTemplate, NotUsed], Future[Long]) = findSrv[CaseTemplateModel, CaseTemplate](caseTemplateModel, queryDef, range, sortBy) } diff --git a/thehive-backend/app/services/CustomWSAPI.scala b/thehive-backend/app/services/CustomWSAPI.scala index f27e5a066d..fabc126f3b 100644 --- a/thehive-backend/app/services/CustomWSAPI.scala +++ b/thehive-backend/app/services/CustomWSAPI.scala @@ -19,7 +19,7 @@ object CustomWSAPI { .parse() def parseProxyConfig(config: Configuration): Option[WSProxyServer] = - config.getOptional[Configuration]("play.ws.proxy").map { proxyConfig ⇒ + config.getOptional[Configuration]("play.ws.proxy").map { proxyConfig => DefaultWSProxyServer( proxyConfig.get[String]("host"), proxyConfig.get[Int]("port"), @@ -35,7 +35,7 @@ object CustomWSAPI { def getWS(config: Configuration)(implicit mat: Materializer): AhcWSClient = { val clientConfig = parseWSConfig(config) val clientConfigWithTruststore = config.getOptional[String]("play.cert") match { - case Some(p) ⇒ + case Some(p) => logger.warn("""Use of "cert" parameter in configuration file is deprecated. 
Please use: | ws.ssl { | trustManager = { @@ -67,7 +67,7 @@ object CustomWSAPI { ) ) ) - case None ⇒ clientConfig + case None => clientConfig } AhcWSClient(clientConfigWithTruststore, None) } @@ -110,7 +110,7 @@ class CustomWSAPI( try { new CustomWSAPI(Configuration(subConfig.underlying.atKey("play").withFallback(config.underlying)), environment, lifecycle, mat) } catch { - case NonFatal(e) ⇒ + case NonFatal(e) => logger.error(s"WSAPI configuration error, use default values", e) this } diff --git a/thehive-backend/app/services/DashboardSrv.scala b/thehive-backend/app/services/DashboardSrv.scala index 710da6a14b..4fecbef43b 100644 --- a/thehive-backend/app/services/DashboardSrv.scala +++ b/thehive-backend/app/services/DashboardSrv.scala @@ -42,13 +42,18 @@ class DashboardSrv @Inject()( def update(dashboard: Dashboard, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Dashboard] = update(dashboard, fields, ModifyConfig.default) - def update(dashboard: Dashboard, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Dashboard] = + def update(dashboard: Dashboard, fields: Fields, modifyConfig: ModifyConfig)( + implicit authContext: AuthContext, + ec: ExecutionContext + ): Future[Dashboard] = updateSrv(dashboard, fields, modifyConfig) def delete(id: String)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Dashboard] = deleteSrv[DashboardModel, Dashboard](dashboardModel, id) - def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Dashboard, NotUsed], Future[Long]) = + def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])( + implicit ec: ExecutionContext + ): (Source[Dashboard, NotUsed], Future[Long]) = findSrv[DashboardModel, Dashboard](dashboardModel, queryDef, range, sortBy) def stats(queryDef: QueryDef, aggs: Seq[Agg])(implicit ec: ExecutionContext): Future[JsObject] = findSrv(dashboardModel, queryDef, aggs: _*) diff --git a/thehive-backend/app/services/JsonFormat.scala b/thehive-backend/app/services/JsonFormat.scala index 6cea8adc1b..a09f454140 100644 --- a/thehive-backend/app/services/JsonFormat.scala +++ b/thehive-backend/app/services/JsonFormat.scala @@ -4,23 +4,23 @@ import play.api.libs.json.{Json, OWrites} object JsonFormat { implicit val caseSimilarityWrites: OWrites[CaseSimilarity] = OWrites[CaseSimilarity] { - case CaseSimilarity(caze, similarIocCount, iocCount, similarArtifactCount, artifactCount) ⇒ + case CaseSimilarity(caze, similarIocCount, iocCount, similarArtifactCount, artifactCount) => Json.obj( - "id" → caze.id, - "_id" → caze.id, - "caseId" → caze.caseId(), - "title" → caze.title(), - "tags" → caze.tags(), - "status" → caze.status(), - "severity" → caze.severity(), - "resolutionStatus" → caze.resolutionStatus(), - "tlp" → caze.tlp(), - "startDate" → caze.startDate(), - "endDate" → caze.endDate(), - "similarIocCount" → similarIocCount, - "iocCount" → iocCount, - "similarArtifactCount" → similarArtifactCount, - "artifactCount" → artifactCount + "id" -> caze.id, + "_id" -> caze.id, + "caseId" -> caze.caseId(), + "title" -> caze.title(), + "tags" -> caze.tags(), + "status" -> caze.status(), + "severity" -> caze.severity(), + "resolutionStatus" -> caze.resolutionStatus(), + "tlp" -> caze.tlp(), + "startDate" -> caze.startDate(), + "endDate" -> caze.endDate(), + "similarIocCount" -> similarIocCount, + "iocCount" -> iocCount, + "similarArtifactCount" -> similarArtifactCount, + "artifactCount" -> 
artifactCount ) } } diff --git a/thehive-backend/app/services/KeyAuthSrv.scala b/thehive-backend/app/services/KeyAuthSrv.scala index 3bc51e585c..2d97e367b3 100644 --- a/thehive-backend/app/services/KeyAuthSrv.scala +++ b/thehive-backend/app/services/KeyAuthSrv.scala @@ -37,19 +37,19 @@ class KeyAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext, .filter(_.key().contains(key)) .runWith(Sink.headOption) .flatMap { - case Some(user) ⇒ userSrv.getFromUser(request, user, name) - case None ⇒ Future.failed(AuthenticationError("Authentication failure")) + case Some(user) => userSrv.getFromUser(request, user, name) + case None => Future.failed(AuthenticationError("Authentication failure")) } } override def renewKey(username: String)(implicit authContext: AuthContext): Future[String] = { val newKey = generateKey() - userSrv.update(username, Fields.empty.set("key", newKey)).map(_ ⇒ newKey) + userSrv.update(username, Fields.empty.set("key", newKey)).map(_ => newKey) } override def getKey(username: String)(implicit authContext: AuthContext): Future[String] = userSrv.get(username).map(_.key().getOrElse(throw BadRequestError(s"User $username hasn't key"))) override def removeKey(username: String)(implicit authContext: AuthContext): Future[Unit] = - userSrv.update(username, Fields.empty.set("key", JsArray())).map(_ ⇒ ()) + userSrv.update(username, Fields.empty.set("key", JsArray())).map(_ => ()) } diff --git a/thehive-backend/app/services/LocalAuthSrv.scala b/thehive-backend/app/services/LocalAuthSrv.scala index 4aafa2eda1..e194af4ccb 100644 --- a/thehive-backend/app/services/LocalAuthSrv.scala +++ b/thehive-backend/app/services/LocalAuthSrv.scala @@ -23,20 +23,20 @@ class LocalAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext private[services] def doAuthenticate(user: User, password: String): Boolean = user.password().map(_.split(",", 2)).fold(false) { - case Array(seed, pwd) ⇒ + case Array(seed, pwd) => val hash = Hasher("SHA-256").fromString(seed + password).head.toString hash == pwd - case _ ⇒ false + case _ => false } override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] = - userSrv.get(username).flatMap { user ⇒ + userSrv.get(username).flatMap { user => if (doAuthenticate(user, password)) userSrv.getFromUser(request, user, name) else Future.failed(AuthenticationError("Authentication failure")) } override def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] = - userSrv.get(username).flatMap { user ⇒ + userSrv.get(username).flatMap { user => if (doAuthenticate(user, oldPassword)) setPassword(username, newPassword) else Future.failed(AuthorizationError("Authentication failure")) } @@ -44,6 +44,6 @@ class LocalAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext override def setPassword(username: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] = { val seed = Random.nextString(10).replace(',', '!') val newHash = seed + "," + Hasher("SHA-256").fromString(seed + newPassword).head.toString - userSrv.update(username, Fields.empty.set("password", newHash)).map(_ ⇒ ()) + userSrv.update(username, Fields.empty.set("password", newHash)).map(_ => ()) } } diff --git a/thehive-backend/app/services/LogSrv.scala b/thehive-backend/app/services/LogSrv.scala index 214c383cdc..0db003ffe8 100644 --- a/thehive-backend/app/services/LogSrv.scala +++ b/thehive-backend/app/services/LogSrv.scala @@ -34,7 +34,7 
@@ class LogSrv @Inject()( def create(taskId: String, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Log] = getSrv[TaskModel, Task](taskModel, taskId) - .flatMap { task ⇒ + .flatMap { task => create(task, fields) } @@ -57,18 +57,18 @@ class LogSrv @Inject()( def realDelete(log: Log)(implicit ec: ExecutionContext): Future[Unit] = for { - _ ← auditSrv + _ <- auditSrv .findFor(log, Some("all"), Nil) ._1 .mapAsync(1)(auditSrv.realDelete) .runWith(Sink.ignore) - _ ← log.attachment().fold[Future[Unit]](Future.successful(())) { attachment ⇒ + _ <- log.attachment().fold[Future[Unit]](Future.successful(())) { attachment => attachmentSrv.attachmentUseCount(attachment.id).flatMap { - case 1 ⇒ attachmentSrv.delete(attachment.id) - case _ ⇒ Future.successful(()) + case 1 => attachmentSrv.delete(attachment.id) + case _ => Future.successful(()) } } - _ ← dbRemove(log) + _ <- dbRemove(log) } yield () def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(implicit ec: ExecutionContext): (Source[Log, NotUsed], Future[Long]) = diff --git a/thehive-backend/app/services/OAuth2Srv.scala b/thehive-backend/app/services/OAuth2Srv.scala index b2d0c6d8e2..a7a69797c7 100644 --- a/thehive-backend/app/services/OAuth2Srv.scala +++ b/thehive-backend/app/services/OAuth2Srv.scala @@ -34,15 +34,15 @@ object OAuth2Config { def apply(configuration: Configuration): Option[OAuth2Config] = for { - clientId ← configuration.getOptional[String]("auth.oauth2.clientId") - clientSecret ← configuration.getOptional[String]("auth.oauth2.clientSecret") - redirectUri ← configuration.getOptional[String]("auth.oauth2.redirectUri") - responseType ← configuration.getOptional[String]("auth.oauth2.responseType") + clientId <- configuration.getOptional[String]("auth.oauth2.clientId") + clientSecret <- configuration.getOptional[String]("auth.oauth2.clientSecret") + redirectUri <- configuration.getOptional[String]("auth.oauth2.redirectUri") + responseType <- configuration.getOptional[String]("auth.oauth2.responseType") grantType = configuration.getOptional[String]("auth.oauth2.grantType").getOrElse("authorization_code") - authorizationUrl ← configuration.getOptional[String]("auth.oauth2.authorizationUrl") - tokenUrl ← configuration.getOptional[String]("auth.oauth2.tokenUrl") - userUrl ← configuration.getOptional[String]("auth.oauth2.userUrl") - scope ← configuration.getOptional[Seq[String]]("auth.oauth2.scope") + authorizationUrl <- configuration.getOptional[String]("auth.oauth2.authorizationUrl") + tokenUrl <- configuration.getOptional[String]("auth.oauth2.tokenUrl") + userUrl <- configuration.getOptional[String]("auth.oauth2.userUrl") + scope <- configuration.getOptional[Seq[String]]("auth.oauth2.scope") authorizationHeader = configuration.getOptional[String]("auth.oauth2.authorizationHeader").getOrElse("Bearer") autocreate = configuration.getOptional[Boolean]("auth.sso.autocreate").getOrElse(false) autoupdate = configuration.getOptional[Boolean]("auth.sso.autoupdate").getOrElse(false) @@ -80,21 +80,21 @@ class OAuth2Srv( val Oauth2TokenQueryString = "code" - private def withOAuth2Config[A](body: OAuth2Config ⇒ Future[A]): Future[A] = + private def withOAuth2Config[A](body: OAuth2Config => Future[A]): Future[A] = oauth2Config.fold[Future[A]](Future.failed(AuthenticationError("OAuth2 not configured properly")))(body) override def authenticate()(implicit request: RequestHeader): Future[Either[Result, AuthContext]] = - withOAuth2Config { oauth2Config ⇒ + withOAuth2Config { oauth2Config => if 
(!isSecuredAuthCode(request)) { logger.debug("Code or state is not provided, redirect to authorizationUrl") Future.successful(Left(authRedirect(oauth2Config))) } else { (for { - token ← getToken(oauth2Config, request) - userData ← getUserData(oauth2Config, token) - authContext ← authenticate(oauth2Config, request, userData) + token <- getToken(oauth2Config, request) + userData <- getUserData(oauth2Config, token) + authContext <- authenticate(oauth2Config, request, userData) } yield Right(authContext)).recoverWith { - case error ⇒ Future.failed(AuthenticationError(s"OAuth2 authentication failure: ${error.getMessage}")) + case error => Future.failed(AuthenticationError(s"OAuth2 authentication failure: ${error.getMessage}")) } } } @@ -110,17 +110,17 @@ class OAuth2Srv( private def authRedirect(oauth2Config: OAuth2Config): Result = { val state = UUID.randomUUID().toString val queryStringParams = Map[String, Seq[String]]( - "scope" → Seq(oauth2Config.scope.mkString(" ")), - "response_type" → Seq(oauth2Config.responseType), - "redirect_uri" → Seq(oauth2Config.redirectUri), - "client_id" → Seq(oauth2Config.clientId), - "state" → Seq(state) + "scope" -> Seq(oauth2Config.scope.mkString(" ")), + "response_type" -> Seq(oauth2Config.responseType), + "redirect_uri" -> Seq(oauth2Config.redirectUri), + "client_id" -> Seq(oauth2Config.clientId), + "state" -> Seq(state) ) logger.debug(s"Redirecting to ${oauth2Config.redirectUri} with $queryStringParams and state $state") Results .Redirect(oauth2Config.authorizationUrl, queryStringParams, status = 302) - .withSession("state" → state) + .withSession("state" -> state) } /** @@ -131,18 +131,18 @@ class OAuth2Srv( private def getToken[A](oauth2Config: OAuth2Config, request: RequestHeader): Future[String] = { val token = for { - state ← request.session.get("state") - stateQs ← request.queryString.get("state").flatMap(_.headOption) + state <- request.session.get("state") + stateQs <- request.queryString.get("state").flatMap(_.headOption) if state == stateQs } yield request.queryString.get("code").flatMap(_.headOption) match { - case Some(code) ⇒ + case Some(code) => logger.debug(s"Attempting to retrieve OAuth2 token from ${oauth2Config.tokenUrl} with code $code") getAuthTokenFromCode(oauth2Config, code, state) - .map { t ⇒ + .map { t => logger.trace(s"Got token $t") t } - case None ⇒ + case None => Future.failed(AuthenticationError(s"OAuth2 server code missing ${request.queryString.get("error")}")) } token.getOrElse(Future.failed(BadRequestError("OAuth2 states mismatch"))) @@ -164,21 +164,21 @@ class OAuth2Srv( | state: $state |""".stripMargin) ws.url(oauth2Config.tokenUrl) - .withHttpHeaders("Accept" → "application/json") + .withHttpHeaders("Accept" -> "application/json") .post( Map( - "code" → code, - "grant_type" → oauth2Config.grantType, - "client_secret" → oauth2Config.clientSecret, - "redirect_uri" → oauth2Config.redirectUri, - "client_id" → oauth2Config.clientId, - "state" → state + "code" -> code, + "grant_type" -> oauth2Config.grantType, + "client_secret" -> oauth2Config.clientSecret, + "redirect_uri" -> oauth2Config.redirectUri, + "client_id" -> oauth2Config.clientId, + "state" -> state ) ) .transform { - case Success(r) if r.status == 200 ⇒ Success((r.json \ "access_token").asOpt[String].getOrElse("")) - case Failure(error) ⇒ Failure(AuthenticationError(s"OAuth2 token verification failure ${error.getMessage}")) - case Success(r) ⇒ Failure(AuthenticationError(s"OAuth2/token unexpected response from server (${r.status} ${r.statusText})")) + case 
Success(r) if r.status == 200 => Success((r.json \ "access_token").asOpt[String].getOrElse("")) + case Failure(error) => Failure(AuthenticationError(s"OAuth2 token verification failure ${error.getMessage}")) + case Success(r) => Failure(AuthenticationError(s"OAuth2/token unexpected response from server (${r.status} ${r.statusText})")) } } @@ -190,39 +190,39 @@ class OAuth2Srv( private def getUserData(oauth2Config: OAuth2Config, token: String): Future[JsObject] = { logger.trace(s"Request to ${oauth2Config.userUrl} with authorization header: ${oauth2Config.authorizationHeader} $token") ws.url(oauth2Config.userUrl) - .addHttpHeaders("Authorization" → s"${oauth2Config.authorizationHeader} $token") + .addHttpHeaders("Authorization" -> s"${oauth2Config.authorizationHeader} $token") .get() .transform { - case Success(r) if r.status == 200 ⇒ Success(r.json.as[JsObject]) - case Failure(error) ⇒ Failure(AuthenticationError(s"OAuth2 user data fetch failure ${error.getMessage}")) - case Success(r) ⇒ Failure(AuthenticationError(s"OAuth2/userinfo unexpected response from server (${r.status} ${r.statusText})")) + case Success(r) if r.status == 200 => Success(r.json.as[JsObject]) + case Failure(error) => Failure(AuthenticationError(s"OAuth2 user data fetch failure ${error.getMessage}")) + case Success(r) => Failure(AuthenticationError(s"OAuth2/userinfo unexpected response from server (${r.status} ${r.statusText})")) } } private def authenticate(oauth2Config: OAuth2Config, request: RequestHeader, userData: JsObject): Future[AuthContext] = for { - userFields ← ssoMapper.getUserFields(userData) - login ← userFields.getString("login").fold(Future.failed[String](AuthenticationError("")))(Future.successful) - user ← userSrv + userFields <- ssoMapper.getUserFields(userData) + login <- userFields.getString("login").fold(Future.failed[String](AuthenticationError("")))(Future.successful) + user <- userSrv .get(login) .flatMap { - case u if oauth2Config.autoupdate ⇒ + case u if oauth2Config.autoupdate => logger.debug(s"Updating OAuth/OIDC user") - userSrv.inInitAuthContext { implicit authContext ⇒ + userSrv.inInitAuthContext { implicit authContext => // Only update name and roles, not login (can't change it) userSrv .update(u, userFields.unset("login")) } - case u ⇒ Future.successful(u) + case u => Future.successful(u) } .recoverWith { - case _: NotFoundError if oauth2Config.autocreate ⇒ + case _: NotFoundError if oauth2Config.autocreate => logger.debug(s"Creating OAuth/OIDC user") - userSrv.inInitAuthContext { implicit authContext ⇒ + userSrv.inInitAuthContext { implicit authContext => userSrv.create(userFields.set("login", userFields.getString("login").get.toLowerCase)) } } - authContext ← userSrv.getFromUser(request, user, name) + authContext <- userSrv.getFromUser(request, user, name) } yield authContext } diff --git a/thehive-backend/app/services/StreamMessage.scala b/thehive-backend/app/services/StreamMessage.scala index db679edb6a..5b86072b2e 100644 --- a/thehive-backend/app/services/StreamMessage.scala +++ b/thehive-backend/app/services/StreamMessage.scala @@ -21,13 +21,13 @@ case class AggregatedAuditMessage(auxSrv: AuxSrv, message: Future[JsObject], sum val modelSummary = summary.getOrElse(operation.entity.model.modelName, Map.empty[String, Int]) val actionCount = modelSummary.getOrElse(operation.action.toString, 0) copy( - summary = summary + (operation.entity.model.modelName → (modelSummary + - (operation.action.toString → (actionCount + 1)))) + summary = summary + (operation.entity.model.modelName -> 
(modelSummary + + (operation.action.toString -> (actionCount + 1)))) ) } - def toJson(implicit ec: ExecutionContext): Future[JsObject] = message.map { msg ⇒ - Json.obj("base" → msg, "summary" → summary) + def toJson(implicit ec: ExecutionContext): Future[JsObject] = message.map { msg => + Json.obj("base" -> msg, "summary" -> summary) } } @@ -38,26 +38,26 @@ object AggregatedAuditMessage { // First operation of the group val msg = auxSrv(operation.entity, 10, withStats = false, removeUnaudited = true) .recover { - case error ⇒ + case error => logger.error("auxSrv fails", error) JsObject.empty } - .map { obj ⇒ + .map { obj => Json.obj( - "objectId" → operation.entity.id, - "objectType" → operation.entity.model.modelName, - "operation" → operation.action, - "startDate" → operation.date, - "rootId" → operation.entity.routing, - "user" → operation.authContext.userId, - "createdBy" → operation.authContext.userId, - "createdAt" → operation.date, - "requestId" → operation.authContext.requestId, - "object" → obj, - "details" → operation.details + "objectId" -> operation.entity.id, + "objectType" -> operation.entity.model.modelName, + "operation" -> operation.action, + "startDate" -> operation.date, + "rootId" -> operation.entity.routing, + "user" -> operation.authContext.userId, + "createdBy" -> operation.authContext.userId, + "createdAt" -> operation.date, + "requestId" -> operation.authContext.requestId, + "object" -> obj, + "details" -> operation.details ) } - new AggregatedAuditMessage(auxSrv, msg, Map(operation.entity.model.modelName → Map(operation.action.toString → 1))) + new AggregatedAuditMessage(auxSrv, msg, Map(operation.entity.model.modelName -> Map(operation.action.toString -> 1))) } } @@ -71,7 +71,7 @@ case class AggregatedMigrationMessage(tableName: String, current: Long, total: L def toJson(implicit ec: ExecutionContext): Future[JsObject] = Future.successful( - Json.obj("base" → Json.obj("rootId" → current, "objectType" → "migration", "tableName" → tableName, "current" → current, "total" → total)) + Json.obj("base" -> Json.obj("rootId" -> current, "objectType" -> "migration", "tableName" -> tableName, "current" -> current, "total" -> total)) ) } diff --git a/thehive-backend/app/services/StreamSerializer.scala b/thehive-backend/app/services/StreamSerializer.scala index cddfb1ecfb..89c5efbd7b 100644 --- a/thehive-backend/app/services/StreamSerializer.scala +++ b/thehive-backend/app/services/StreamSerializer.scala @@ -18,10 +18,10 @@ class StreamSerializer extends Serializer { */ def toBinary(o: AnyRef): Array[Byte] = o match { - case GetOperations ⇒ "GetOperations".getBytes - case StreamMessages(msg) ⇒ JsArray(msg).toString.getBytes - case Submit ⇒ "Submit".getBytes - case _ ⇒ Array.empty[Byte] // Not serializable + case GetOperations => "GetOperations".getBytes + case StreamMessages(msg) => JsArray(msg).toString.getBytes + case Submit => "Submit".getBytes + case _ => Array.empty[Byte] // Not serializable } /** @@ -31,8 +31,8 @@ class StreamSerializer extends Serializer { @throws(classOf[NotSerializableException]) def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = new String(bytes) match { - case "GetOperations" ⇒ GetOperations - case "Submit" ⇒ Submit - case s ⇒ Try(StreamMessages(Json.parse(s).as[Seq[JsObject]])).getOrElse(throw new NotSerializableException) + case "GetOperations" => GetOperations + case "Submit" => Submit + case s => Try(StreamMessages(Json.parse(s).as[Seq[JsObject]])).getOrElse(throw new NotSerializableException) } } diff --git 
a/thehive-backend/app/services/StreamSrv.scala b/thehive-backend/app/services/StreamSrv.scala index 6cca5fb54b..a389a6e4c4 100644 --- a/thehive-backend/app/services/StreamSrv.scala +++ b/thehive-backend/app/services/StreamSrv.scala @@ -53,88 +53,88 @@ class LocalStreamActor @Inject()(eventSrv: EventSrv, auxSrv: AuxSrv) extends Act def unapply(msg: Any): Option[AuditOperation] = msg match { - case ao: AuditOperation ⇒ + case ao: AuditOperation => ao.entity.model match { - case am: AuditedModel ⇒ Some(ao.copy(details = am.selectAuditedAttributes(ao.details))) - case _ ⇒ None + case am: AuditedModel => Some(ao.copy(details = am.selectAuditedAttributes(ao.details))) + case _ => None } - case _ ⇒ None + case _ => None } } object RequestStart { def unapply(msg: Any): Option[String] = msg match { - case RequestProcessStart(request) ⇒ Some(Instance.getRequestId(request)) - case InternalRequestProcessStart(requestId) ⇒ Some(requestId) - case _ ⇒ None + case RequestProcessStart(request) => Some(Instance.getRequestId(request)) + case InternalRequestProcessStart(requestId) => Some(requestId) + case _ => None } } object RequestEnd { def unapply(msg: Any): Option[String] = msg match { - case RequestProcessEnd(request, _) ⇒ Some(Instance.getRequestId(request)) - case InternalRequestProcessEnd(requestId) ⇒ Some(requestId) - case _ ⇒ None + case RequestProcessEnd(request, _) => Some(Instance.getRequestId(request)) + case InternalRequestProcessEnd(requestId) => Some(requestId) + case _ => None } } override def receive: Receive = receive(Map.empty, None) def receive(messages: Map[String, Option[AggregatedMessage[_]]], flushScheduler: Option[Cancellable]): Receive = { - case RequestStart(requestId) ⇒ + case RequestStart(requestId) => logger.trace(s"Start of request $requestId") - context.become(receive(messages + (requestId → None), None)) + context.become(receive(messages + (requestId -> None), None)) - case RequestEnd(requestId) ⇒ + case RequestEnd(requestId) => logger.trace(s"End of request $requestId") messages.get(requestId).collect { - case Some(message) ⇒ + case Some(message) => logger.trace(s"Sending $message to mediator") - message.toJson.foreach(msg ⇒ mediator ! Publish("stream", StreamMessages(Seq(msg)))) + message.toJson.foreach(msg => mediator ! Publish("stream", StreamMessages(Seq(msg)))) } context.become(receive(messages - requestId, None)) - case NormalizedOperation(operation) ⇒ + case NormalizedOperation(operation) => val requestId = operation.authContext.requestId logger.trace(s"Receiving audit operation from request $requestId: $operation") messages.get(requestId) match { - case None ⇒ + case None => logger.debug("Operation that comes after the end of request, send it to stream actor") - AggregatedAuditMessage(auxSrv, operation).toJson.foreach(msg ⇒ mediator ! Publish("stream", StreamMessages(Seq(msg)))) - case Some(None) ⇒ + AggregatedAuditMessage(auxSrv, operation).toJson.foreach(msg => mediator ! 
Publish("stream", StreamMessages(Seq(msg)))) + case Some(None) => logger.debug("First operation of the request, creating operation group") - context.become(receive(messages + (requestId → Some(AggregatedAuditMessage(auxSrv, operation))), None)) - case Some(Some(aam: AggregatedAuditMessage)) ⇒ + context.become(receive(messages + (requestId -> Some(AggregatedAuditMessage(auxSrv, operation))), None)) + case Some(Some(aam: AggregatedAuditMessage)) => logger.debug("Operation included in existing group") - context.become(receive(messages + (requestId → Some(aam.add(operation))), None)) - case _ ⇒ + context.become(receive(messages + (requestId -> Some(aam.add(operation))), None)) + case _ => logger.debug("Impossible") sys.error("") } /* Migration process event */ - case event: MigrationEvent ⇒ + case event: MigrationEvent => val newMessage = messages.get(event.modelName).flatten match { - case Some(m: AggregatedMigrationMessage) ⇒ m.add(event) - case None ⇒ AggregatedMigrationMessage(event) - case _ ⇒ sys.error("impossible") + case Some(m: AggregatedMigrationMessage) => m.add(event) + case None => AggregatedMigrationMessage(event) + case _ => sys.error("impossible") } // automatically flush messages after 1s val newFlushScheduler = flushScheduler.getOrElse(context.system.scheduler.scheduleOnce(1.second, self, Submit)) - context.become(receive(messages + (event.modelName → Some(newMessage)), Some(newFlushScheduler))) + context.become(receive(messages + (event.modelName -> Some(newMessage)), Some(newFlushScheduler))) /* Database migration has just finished */ - case EndOfMigrationEvent ⇒ + case EndOfMigrationEvent => flushScheduler.foreach(_.cancel()) self ! Submit - context.become(receive(messages + ("end" → Some(AggregatedMigrationMessage.endOfMigration)), None)) + context.become(receive(messages + ("end" -> Some(AggregatedMigrationMessage.endOfMigration)), None)) - case Submit ⇒ + case Submit => Future .traverse(messages.values.flatten)(_.toJson) - .foreach(message ⇒ mediator ! Publish("stream", StreamMessages(message.toSeq))) + .foreach(message => mediator ! Publish("stream", StreamMessages(message.toSeq))) context.become(receive(Map.empty, None)) } } @@ -174,16 +174,16 @@ class StreamActor(cacheExpiration: FiniteDuration, refresh: FiniteDuration) exte val timeout = context.system.scheduler.scheduleOnce(refresh, self, Submit) { - case sm: StreamMessages ⇒ + case sm: StreamMessages => logger.debug(s"receive stream message $sm") waitingRequest ! sm timeout.cancel() context.become(receive) - case Submit ⇒ + case Submit => waitingRequest ! StreamMessages.empty timeout.cancel() context.become(receive) - case GetOperations ⇒ + case GetOperations => waitingRequest ! StreamMessages.empty timeout.cancel() context.become(receive(sender)) @@ -191,16 +191,16 @@ class StreamActor(cacheExpiration: FiniteDuration, refresh: FiniteDuration) exte } private def receive(waitingMessages: Seq[JsObject]): Receive = { - case GetOperations ⇒ + case GetOperations => sender ! 
StreamMessages(waitingMessages) renewExpiration() context.become(receive) - case StreamMessages(msg) ⇒ + case StreamMessages(msg) => context.become(receive(waitingMessages ++ msg)) } def receive: Receive = { - case StreamMessages(msg) ⇒ context.become(receive(msg)) - case GetOperations ⇒ context.become(receive(sender)) + case StreamMessages(msg) => context.become(receive(msg)) + case GetOperations => context.become(receive(sender)) } } diff --git a/thehive-backend/app/services/TaskSrv.scala b/thehive-backend/app/services/TaskSrv.scala index d9c8f755ae..f86676eca0 100644 --- a/thehive-backend/app/services/TaskSrv.scala +++ b/thehive-backend/app/services/TaskSrv.scala @@ -33,7 +33,7 @@ class TaskSrv @Inject()( def create(caseId: String, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Task] = getSrv[CaseModel, Case](caseModel, caseId) - .flatMap { caze ⇒ + .flatMap { caze => create(caze, fields) } @@ -42,12 +42,12 @@ class TaskSrv @Inject()( def create(caseId: String, fields: Seq[Fields])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Seq[Try[Task]]] = getSrv[CaseModel, Case](caseModel, caseId) - .flatMap { caze ⇒ + .flatMap { caze => create(caze, fields) } def create(caze: Case, fields: Seq[Fields])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Seq[Try[Task]]] = - createSrv[TaskModel, Task, Case](taskModel, fields.map(caze → _)) + createSrv[TaskModel, Task, Case](taskModel, fields.map(caze -> _)) def get(id: String)(implicit ec: ExecutionContext): Future[Task] = getSrv[TaskModel, Task](taskModel, id) @@ -57,7 +57,7 @@ class TaskSrv @Inject()( def update(id: String, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Task] = getSrv[TaskModel, Task](taskModel, id) - .flatMap { task ⇒ + .flatMap { task => update(task, fields, modifyConfig) } @@ -86,11 +86,11 @@ class TaskSrv @Inject()( find(filter, range, Nil) ._1 .map { - case task if task.status() == TaskStatus.Waiting ⇒ (task, cancelTask) - case task ⇒ (task, completeTask) + case task if task.status() == TaskStatus.Waiting => (task, cancelTask) + case task => (task, completeTask) } .runWith(Sink.seq) - .flatMap { taskUpdate ⇒ + .flatMap { taskUpdate => updateSrv(taskUpdate, ModifyConfig.default) } } @@ -101,17 +101,17 @@ class TaskSrv @Inject()( def realDelete(task: Task)(implicit ec: ExecutionContext): Future[Unit] = { import org.elastic4play.services.QueryDSL._ for { - _ ← auditSrv + _ <- auditSrv .findFor(task, Some("all"), Nil) ._1 .mapAsync(1)(auditSrv.realDelete) .runWith(Sink.ignore) - _ ← logSrv + _ <- logSrv .find(withParent(task), Some("all"), Nil) ._1 .mapAsync(1)(logSrv.realDelete) .runWith(Sink.ignore) - _ ← dbRemove(task) + _ <- dbRemove(task) } yield () } diff --git a/thehive-backend/app/services/TheHiveAuthSrv.scala b/thehive-backend/app/services/TheHiveAuthSrv.scala index a49224aedb..18d98ff528 100644 --- a/thehive-backend/app/services/TheHiveAuthSrv.scala +++ b/thehive-backend/app/services/TheHiveAuthSrv.scala @@ -15,7 +15,7 @@ object TheHiveAuthSrv { def getAuthSrv(authTypes: Seq[String], authModules: immutable.Set[AuthSrv]): Seq[AuthSrv] = ("key" +: authTypes.filterNot(_ == "key")) - .flatMap { authType ⇒ + .flatMap { authType => authModules .find(_.name == authType) .orElse { diff --git a/thehive-backend/app/services/UserSrv.scala b/thehive-backend/app/services/UserSrv.scala index 6c6573726b..2f79f43b46 100644 --- a/thehive-backend/app/services/UserSrv.scala +++ 
b/thehive-backend/app/services/UserSrv.scala @@ -11,7 +11,7 @@ import models.{Roles, User, UserModel, UserStatus} import org.elastic4play.controllers.Fields import org.elastic4play.database.{DBIndex, ModifyConfig} -import org.elastic4play.services.{User ⇒ EUser, UserSrv ⇒ EUserSrv, _} +import org.elastic4play.services.{User => EUser, UserSrv => EUserSrv, _} import org.elastic4play.utils.Instance import org.elastic4play.{AuthenticationError, AuthorizationError} @@ -33,45 +33,47 @@ class UserSrv @Inject()( override def getFromId(request: RequestHeader, userId: String, authMethod: String): Future[AuthContext] = getSrv[UserModel, User](userModel, userId)(defaultExecutionContext) - .flatMap { user ⇒ + .flatMap { user => getFromUser(request, user, authMethod) }(defaultExecutionContext) override def getFromUser(request: RequestHeader, user: EUser, authMethod: String): Future[AuthContext] = user match { - case u: User if u.status() == UserStatus.Ok ⇒ + case u: User if u.status() == UserStatus.Ok => Future.successful(AuthContextImpl(user.id, user.getUserName, Instance.getRequestId(request), user.getRoles, authMethod)) - case _ ⇒ Future.failed(AuthorizationError("Your account is locked")) + case _ => Future.failed(AuthorizationError("Your account is locked")) } override def getInitialUser(request: RequestHeader): Future[AuthContext] = - dbIndex.getSize(userModel.modelName)(defaultExecutionContext).map { - case size if size > 0 ⇒ throw AuthenticationError(s"Use of initial user is forbidden because users exist in database") - case _ ⇒ AuthContextImpl("init", "", Instance.getRequestId(request), Seq(Roles.admin, Roles.read, Roles.alert), "init") - }(defaultExecutionContext) + dbIndex + .getSize(userModel.modelName)(defaultExecutionContext) + .map { + case size if size > 0 => throw AuthenticationError(s"Use of initial user is forbidden because users exist in database") + case _ => AuthContextImpl("init", "", Instance.getRequestId(request), Seq(Roles.admin, Roles.read, Roles.alert), "init") + }(defaultExecutionContext) - override def inInitAuthContext[A](block: AuthContext ⇒ Future[A]): Future[A] = { + override def inInitAuthContext[A](block: AuthContext => Future[A]): Future[A] = { val authContext = AuthContextImpl("init", "", Instance.getInternalId, Seq(Roles.admin, Roles.read, Roles.alert), "init") eventSrv.publish(InternalRequestProcessStart(authContext.requestId)) block(authContext).andThen { - case _ ⇒ eventSrv.publish(InternalRequestProcessEnd(authContext.requestId)) + case _ => eventSrv.publish(InternalRequestProcessEnd(authContext.requestId)) }(defaultExecutionContext) } - def extraAuthContext[A](block: AuthContext ⇒ Future[A])(implicit authContext: AuthContext): Future[A] = { + def extraAuthContext[A](block: AuthContext => Future[A])(implicit authContext: AuthContext): Future[A] = { val ac = AuthContextImpl(authContext.userId, authContext.userName, Instance.getInternalId, authContext.roles, "init") eventSrv.publish(InternalRequestProcessStart(ac.requestId)) block(ac).andThen { - case _ ⇒ eventSrv.publish(InternalRequestProcessEnd(ac.requestId)) + case _ => eventSrv.publish(InternalRequestProcessEnd(ac.requestId)) }(defaultExecutionContext) } def create(fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[User] = fields.getString("password") match { - case None ⇒ createSrv[UserModel, User](userModel, fields) - case Some(password) ⇒ - createSrv[UserModel, User](userModel, fields.unset("password")).flatMap { user ⇒ - authSrv.get.setPassword(user.userId(), password).map(_ 
⇒ user) + case None => createSrv[UserModel, User](userModel, fields) + case Some(password) => + createSrv[UserModel, User](userModel, fields.unset("password")).flatMap { user => + authSrv.get.setPassword(user.userId(), password).map(_ => user) } } diff --git a/thehive-backend/app/services/WebHook.scala b/thehive-backend/app/services/WebHook.scala index 78b85a59da..8ef6b3ef9b 100644 --- a/thehive-backend/app/services/WebHook.scala +++ b/thehive-backend/app/services/WebHook.scala @@ -16,32 +16,32 @@ case class WebHook(name: String, ws: WSRequest)(implicit ec: ExecutionContext) { private[WebHook] lazy val logger = Logger(getClass.getName + "." + name) def send(obj: JsObject): Unit = ws.post(obj).onComplete { - case Success(resp) if resp.status / 100 != 2 ⇒ logger.error(s"WebHook returns status ${resp.status} ${resp.statusText}") - case Failure(_: ConnectException) ⇒ logger.error(s"Connection to WebHook $name error") - case Failure(error) ⇒ logger.error("WebHook call error", error) - case _ ⇒ + case Success(resp) if resp.status / 100 != 2 => logger.error(s"WebHook returns status ${resp.status} ${resp.statusText}") + case Failure(_: ConnectException) => logger.error(s"Connection to WebHook $name error") + case Failure(error) => logger.error("WebHook call error", error) + case _ => } } class WebHooks(webhooks: Seq[WebHook], auxSrv: AuxSrv, implicit val ec: ExecutionContext) { @Inject() def this(configuration: Configuration, globalWS: CustomWSAPI, auxSrv: AuxSrv, ec: ExecutionContext) = { this(for { - cfg ← configuration.getOptional[Configuration]("webhooks").toSeq + cfg <- configuration.getOptional[Configuration]("webhooks").toSeq whWS = globalWS.withConfig(cfg) - name ← cfg.subKeys - whConfig ← Try(cfg.get[Configuration](name)).toOption - url ← whConfig.getOptional[String]("url") + name <- cfg.subKeys + whConfig <- Try(cfg.get[Configuration](name)).toOption + url <- whConfig.getOptional[String]("url") instanceWS = whWS.withConfig(whConfig).url(url) } yield WebHook(name, instanceWS)(ec), auxSrv, ec) } def send(obj: JsObject): Unit = (for { - objectType ← (obj \ "objectType").asOpt[String] - objectId ← (obj \ "objectId").asOpt[String] + objectType <- (obj \ "objectType").asOpt[String] + objectId <- (obj \ "objectId").asOpt[String] } yield auxSrv(objectType, objectId, nparent = 0, withStats = false, removeUnaudited = false)) .getOrElse(Future.successful(JsObject.empty)) - .map(o ⇒ obj + ("object" → o)) + .map(o => obj + ("object" -> o)) .fallbackTo(Future.successful(obj)) - .foreach(o ⇒ webhooks.foreach(_.send(o))) + .foreach(o => webhooks.foreach(_.send(o))) } diff --git a/thehive-backend/app/services/mappers/GroupUserMapper.scala b/thehive-backend/app/services/mappers/GroupUserMapper.scala index 0ec85c1e3a..87272bc724 100644 --- a/thehive-backend/app/services/mappers/GroupUserMapper.scala +++ b/thehive-backend/app/services/mappers/GroupUserMapper.scala @@ -46,26 +46,26 @@ class GroupUserMapper( def expr: Parser[Seq[String]] = "[" ~ opt(realStr ~ rep("," ~ realStr)) ~ "]" ^^ { - case _ ~ Some(firstRole ~ list) ~ _ ⇒ + case _ ~ Some(firstRole ~ list) ~ _ => list.foldLeft(Seq(firstRole)) { - case (queue, _ ~ role) ⇒ role +: queue + case (queue, _ ~ role) => role +: queue } - case _ ~ _ ⇒ Seq.empty[String] + case _ ~ _ => Seq.empty[String] } | opt(realStr) ^^ { - case Some(role) ⇒ Seq(role) - case None ⇒ Seq.empty[String] + case Some(role) => Seq(role) + case None => Seq.empty[String] } } override def getUserFields(jsValue: JsValue, authHeader: Option[(String, String)]): Future[Fields] = groupsUrl 
match { - case Some(groupsEndpointUrl) ⇒ + case Some(groupsEndpointUrl) => logger.debug(s"Retreiving groups from $groupsEndpointUrl") - val apiCall = authHeader.fold(ws.url(groupsEndpointUrl))(headers ⇒ ws.url(groupsEndpointUrl).addHttpHeaders(headers)) - apiCall.get.flatMap { r ⇒ + val apiCall = authHeader.fold(ws.url(groupsEndpointUrl))(headers => ws.url(groupsEndpointUrl).addHttpHeaders(headers)) + apiCall.get.flatMap { r => extractGroupsThenBuildUserFields(jsValue, r.json) } - case None ⇒ + case None => logger.debug(s"Extracting groups from user info") extractGroupsThenBuildUserFields(jsValue, jsValue) } @@ -73,22 +73,22 @@ class GroupUserMapper( private def extractGroupsThenBuildUserFields(jsValue: JsValue, groupsContainer: JsValue): Future[Fields] = groupsContainer \ groupsAttrName match { // Groups received as valid JSON array - case JsDefined(JsArray(groupsList)) ⇒ mapGroupsAndBuildUserFields(jsValue, groupsList.map(_.as[String]).toList) + case JsDefined(JsArray(groupsList)) => mapGroupsAndBuildUserFields(jsValue, groupsList.map(_.as[String]).toList) // Groups list received as string (invalid JSON, for example: "ROLE" or "['Role 1', ROLE2, 'Role_3']") - case JsDefined(JsString(groupsStr)) ⇒ + case JsDefined(JsString(groupsStr)) => val parser = new RoleListParser parser.parseAll(parser.expr, groupsStr) match { - case parser.Success(result, _) ⇒ mapGroupsAndBuildUserFields(jsValue, result) - case err: parser.NoSuccess ⇒ Future.failed(AuthenticationError(s"User info fails: can't parse groups list (${err.msg})")) + case parser.Success(result, _) => mapGroupsAndBuildUserFields(jsValue, result) + case err: parser.NoSuccess => Future.failed(AuthenticationError(s"User info fails: can't parse groups list (${err.msg})")) } // Invalid group list - case JsDefined(error) ⇒ + case JsDefined(error) => Future.failed(AuthenticationError(s"User info fails: invalid groups list received in user info ('$error' of type ${error.getClass})")) // Groups field is undefined - case _: JsUndefined ⇒ + case _: JsUndefined => Future.failed(AuthenticationError(s"User info fails: groups attribute $groupsAttrName doesn't exist in user info")) } @@ -103,12 +103,12 @@ class GroupUserMapper( logger.debug(s"Computed roles: ${roles.mkString(", ")}") val fields = for { - login ← (jsValue \ loginAttrName).validate[String] - name ← (jsValue \ nameAttrName).validate[String] - } yield Fields(Json.obj("login" → login.toLowerCase, "name" → name, "roles" → roles)) + login <- (jsValue \ loginAttrName).validate[String] + name <- (jsValue \ nameAttrName).validate[String] + } yield Fields(Json.obj("login" -> login.toLowerCase, "name" -> name, "roles" -> roles)) fields match { - case JsSuccess(f, _) ⇒ Future.successful(f) - case JsError(errors) ⇒ + case JsSuccess(f, _) => Future.successful(f) + case JsError(errors) => Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._2).map(_.map(_.messages.mkString(", ")).mkString("; ")).mkString}")) } } diff --git a/thehive-backend/app/services/mappers/SimpleUserMapper.scala b/thehive-backend/app/services/mappers/SimpleUserMapper.scala index f0c933083d..baee917eb1 100644 --- a/thehive-backend/app/services/mappers/SimpleUserMapper.scala +++ b/thehive-backend/app/services/mappers/SimpleUserMapper.scala @@ -31,13 +31,13 @@ class SimpleUserMapper( override def getUserFields(jsValue: JsValue, authHeader: Option[(String, String)]): Future[Fields] = { val fields = for { - login ← (jsValue \ loginAttrName).validate[String] - name ← (jsValue \ nameAttrName).validate[String] - roles = 
rolesAttrName.fold(defaultRoles)(r ⇒ (jsValue \ r).asOpt[Seq[String]].getOrElse(defaultRoles)) - } yield Fields(Json.obj("login" → login.toLowerCase, "name" → name, "roles" → roles)) + login <- (jsValue \ loginAttrName).validate[String] + name <- (jsValue \ nameAttrName).validate[String] + roles = rolesAttrName.fold(defaultRoles)(r => (jsValue \ r).asOpt[Seq[String]].getOrElse(defaultRoles)) + } yield Fields(Json.obj("login" -> login.toLowerCase, "name" -> name, "roles" -> roles)) fields match { - case JsSuccess(f, _) ⇒ Future.successful(f) - case JsError(errors) ⇒ + case JsSuccess(f, _) => Future.successful(f) + case JsError(errors) => Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._2).map(_.map(_.messages.mkString(", ")).mkString("; ")).mkString}")) } } diff --git a/thehive-cortex/app/connectors/cortex/CortexConnector.scala b/thehive-cortex/app/connectors/cortex/CortexConnector.scala index 7d25dd9749..6470d05e54 100644 --- a/thehive-cortex/app/connectors/cortex/CortexConnector.scala +++ b/thehive-cortex/app/connectors/cortex/CortexConnector.scala @@ -14,7 +14,7 @@ class CortexConnector extends ConnectorModule with AkkaGuiceSupport { registerController[CortexCtrl] bindActor[JobReplicateActor]("JobReplicateActor") } catch { - case t: Throwable ⇒ logger.error("Cortex connector is disabled because its configuration is invalid", t) + case t: Throwable => logger.error("Cortex connector is disabled because its configuration is invalid", t) } } } diff --git a/thehive-cortex/app/connectors/cortex/controllers/CortexCtrl.scala b/thehive-cortex/app/connectors/cortex/controllers/CortexCtrl.scala index c0c463a689..56ead36fce 100644 --- a/thehive-cortex/app/connectors/cortex/controllers/CortexCtrl.scala +++ b/thehive-cortex/app/connectors/cortex/controllers/CortexCtrl.scala @@ -74,17 +74,17 @@ class CortexCtrl( private var _status = JsObject.empty private def updateStatus(): Unit = Future - .traverse(cortexConfig.instances)(instance ⇒ instance.status()) + .traverse(cortexConfig.instances)(instance => instance.status()) .onComplete { - case Success(statusDetails) ⇒ - val distinctStatus = statusDetails.map(s ⇒ (s \ "status").as[String]).toSet + case Success(statusDetails) => + val distinctStatus = statusDetails.map(s => (s \ "status").as[String]).toSet val healthStatus = if (distinctStatus.contains("OK")) { if (distinctStatus.size > 1) "WARNING" else "OK" } else "ERROR" - _status = Json.obj("enabled" → true, "servers" → statusDetails, "status" → healthStatus) + _status = Json.obj("enabled" -> true, "servers" -> statusDetails, "status" -> healthStatus) system.scheduler.scheduleOnce(statusCheckInterval)(updateStatus()) - case _: Failure[_] ⇒ - _status = Json.obj("enabled" → true, "servers" → JsObject.empty, "status" → "ERROR") + case _: Failure[_] => + _status = Json.obj("enabled" -> true, "servers" -> JsObject.empty, "status" -> "ERROR") system.scheduler.scheduleOnce(statusCheckInterval)(updateStatus()) } updateStatus() @@ -96,14 +96,14 @@ class CortexCtrl( Future .traverse(cortexConfig.instances)(_.health()) .onComplete { - case Success(healthStatus) ⇒ + case Success(healthStatus) => val distinctStatus = healthStatus.toSet _health = if (distinctStatus.contains(HealthStatus.Ok)) { if (distinctStatus.size > 1) HealthStatus.Warning else HealthStatus.Ok } else if (distinctStatus.contains(HealthStatus.Error)) HealthStatus.Error else HealthStatus.Warning system.scheduler.scheduleOnce(statusCheckInterval)(updateHealth()) - case _: Failure[_] ⇒ + case _: Failure[_] => _health = 
HealthStatus.Error system.scheduler.scheduleOnce(statusCheckInterval)(updateHealth()) } @@ -112,59 +112,59 @@ class CortexCtrl( override def health: HealthStatus.Type = _health val router: Router = SimpleRouter { - case POST(p"/job") ⇒ createJob - case GET(p"/job/$jobId<[^/]*>") ⇒ getJob(jobId) - case POST(p"/job/_search") ⇒ findJob - case POST(p"/job/_stats") ⇒ statsJob + case POST(p"/job") => createJob + case GET(p"/job/$jobId<[^/]*>") => getJob(jobId) + case POST(p"/job/_search") => findJob + case POST(p"/job/_stats") => statsJob - case GET(p"/analyzer/$analyzerId<[^/]*>") ⇒ getAnalyzer(analyzerId) - case GET(p"/analyzer/type/$dataType<[^/]*>") ⇒ getAnalyzerFor(dataType) - case GET(p"/analyzer") ⇒ listAnalyzer + case GET(p"/analyzer/$analyzerId<[^/]*>") => getAnalyzer(analyzerId) + case GET(p"/analyzer/type/$dataType<[^/]*>") => getAnalyzerFor(dataType) + case GET(p"/analyzer") => listAnalyzer - case GET(p"/responder/$responderId<[^/]*>") ⇒ getResponder(responderId) - case GET(p"/responder") ⇒ findResponder - case POST(p"/responder/_search") ⇒ findResponder - case GET(p"/responder/$entityType<[^/]*>/$entityId<[^/]*>") ⇒ getResponders(entityType, entityId) + case GET(p"/responder/$responderId<[^/]*>") => getResponder(responderId) + case GET(p"/responder") => findResponder + case POST(p"/responder/_search") => findResponder + case GET(p"/responder/$entityType<[^/]*>/$entityId<[^/]*>") => getResponders(entityType, entityId) - case POST(p"/action") ⇒ createAction - case GET(p"/action") ⇒ findAction - case POST(p"/action/_search") ⇒ findAction - case POST(p"/action/_stats") ⇒ statsAction - case GET(p"/action/$entityType<[^/]*>/$entityId<[^/]*>") ⇒ getActions(entityType, entityId) - case GET(p"/action/$actionId<[^/]*>") ⇒ getAction(actionId) + case POST(p"/action") => createAction + case GET(p"/action") => findAction + case POST(p"/action/_search") => findAction + case POST(p"/action/_stats") => statsAction + case GET(p"/action/$entityType<[^/]*>/$entityId<[^/]*>") => getActions(entityType, entityId) + case GET(p"/action/$actionId<[^/]*>") => getAction(actionId) - case POST(p"/report/template/_search") ⇒ reportTemplateCtrl.find() - case POST(p"/report/template") ⇒ reportTemplateCtrl.create() - case GET(p"/report/template/$caseTemplateId<[^/]*>") ⇒ reportTemplateCtrl.get(caseTemplateId) - case PATCH(p"/report/template/$caseTemplateId<[^/]*>") ⇒ reportTemplateCtrl.update(caseTemplateId) - case DELETE(p"/report/template/$caseTemplateId<[^/]*>") ⇒ reportTemplateCtrl.delete(caseTemplateId) - case GET(p"/report/template/content/$analyzerId<[^/]*>/$reportType<[^/]*>") ⇒ reportTemplateCtrl.getContent(analyzerId, reportType) - case POST(p"/report/template/_import") ⇒ reportTemplateCtrl.importTemplatePackage - case r ⇒ throw NotFoundError(s"${r.uri} not found") + case POST(p"/report/template/_search") => reportTemplateCtrl.find() + case POST(p"/report/template") => reportTemplateCtrl.create() + case GET(p"/report/template/$caseTemplateId<[^/]*>") => reportTemplateCtrl.get(caseTemplateId) + case PATCH(p"/report/template/$caseTemplateId<[^/]*>") => reportTemplateCtrl.update(caseTemplateId) + case DELETE(p"/report/template/$caseTemplateId<[^/]*>") => reportTemplateCtrl.delete(caseTemplateId) + case GET(p"/report/template/content/$analyzerId<[^/]*>/$reportType<[^/]*>") => reportTemplateCtrl.getContent(analyzerId, reportType) + case POST(p"/report/template/_import") => reportTemplateCtrl.importTemplatePackage + case r => throw NotFoundError(s"${r.uri} not found") } @Timed - def createJob: Action[Fields] = 
authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def createJob: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => val analyzerId = request.body.getString("analyzerId").getOrElse(throw BadRequestError(s"analyzerId is missing")) val artifactId = request.body.getString("artifactId").getOrElse(throw BadRequestError(s"artifactId is missing")) val cortexId = request.body.getString("cortexId") - cortexAnalyzerSrv.submitJob(cortexId, analyzerId, artifactId).map { job ⇒ + cortexAnalyzerSrv.submitJob(cortexId, analyzerId, artifactId).map { job => renderer.toOutput(OK, job) } } @Timed - def getJob(jobId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def getJob(jobId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val withStats = request.body.getBoolean("nstats").getOrElse(false) for { - job ← cortexAnalyzerSrv.getJob(jobId) + job <- cortexAnalyzerSrv.getJob(jobId) jobJson = job.toJson - jobWithStats ← if (withStats) cortexAnalyzerSrv.addImportFieldInArtifacts(jobJson) else Future.successful(jobJson) + jobWithStats <- if (withStats) cortexAnalyzerSrv.addImportFieldInArtifacts(jobJson) else Future.successful(jobJson) } yield Ok(jobWithStats) } @Timed - def findJob: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findJob: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -177,7 +177,7 @@ class CortexCtrl( } @Timed - def statsJob: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def statsJob: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request .body .getValue("query") @@ -187,59 +187,59 @@ class CortexCtrl( .getValue("stats") .getOrElse(throw BadRequestError("Parameter \"stats\" is missing")) .as[Seq[Agg]] - cortexAnalyzerSrv.stats(query, aggs).map(s ⇒ Ok(s)) + cortexAnalyzerSrv.stats(query, aggs).map(s => Ok(s)) } @Timed - def getAnalyzer(analyzerId: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexAnalyzerSrv.getAnalyzer(analyzerId).map { analyzer ⇒ + def getAnalyzer(analyzerId: String): Action[AnyContent] = authenticated(Roles.read).async { _ => + cortexAnalyzerSrv.getAnalyzer(analyzerId).map { analyzer => renderer.toOutput(OK, analyzer) } } @Timed - def getAnalyzerFor(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexAnalyzerSrv.getAnalyzersFor(dataType).map { analyzers ⇒ + def getAnalyzerFor(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { _ => + cortexAnalyzerSrv.getAnalyzersFor(dataType).map { analyzers => renderer.toOutput(OK, analyzers) } } @Timed - def listAnalyzer: Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexAnalyzerSrv.listAnalyzer.map { analyzers ⇒ + def listAnalyzer: Action[AnyContent] = authenticated(Roles.read).async { _ => + cortexAnalyzerSrv.listAnalyzer.map { analyzers => renderer.toOutput(OK, analyzers) } } - def getResponder(responderId: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexActionSrv.getResponderById(responderId).map { responder ⇒ + def getResponder(responderId: String): Action[AnyContent] = 
authenticated(Roles.read).async { _ => + cortexActionSrv.getResponderById(responderId).map { responder => renderer.toOutput(OK, responder) } } - def getResponders(entityType: String, entityId: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexActionSrv.findResponderFor(entityType, entityId).map { responders ⇒ + def getResponders(entityType: String, entityId: String): Action[AnyContent] = authenticated(Roles.read).async { _ => + cortexActionSrv.findResponderFor(entityType, entityId).map { responders => renderer.toOutput(OK, responders) } } - def findResponder: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findResponder: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query") match { - case Some(o: JsObject) ⇒ o - case _ ⇒ JsObject.empty + case Some(o: JsObject) => o + case _ => JsObject.empty } - cortexActionSrv.findResponders(query).map { responders ⇒ + cortexActionSrv.findResponders(query).map { responders => renderer.toOutput(OK, responders) } } - def createAction: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ - cortexActionSrv.executeAction(request.body).map { action ⇒ + def createAction: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => + cortexActionSrv.executeAction(request.body).map { action => renderer.toOutput(OK, action) } } - def findAction: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def findAction: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -248,13 +248,13 @@ class CortexCtrl( renderer.toOutput(OK, actions, total) } - def statsAction: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def statsAction: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - cortexActionSrv.stats(query, aggs).map(s ⇒ Ok(s)) + cortexActionSrv.stats(query, aggs).map(s => Ok(s)) } - def getActions(entityType: String, entityId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def getActions(entityType: String, entityId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import org.elastic4play.services.QueryDSL._ val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -262,8 +262,8 @@ class CortexCtrl( renderer.toOutput(OK, actions, total) } - def getAction(actionId: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ - cortexActionSrv.getAction(actionId).map { action ⇒ + def getAction(actionId: String): Action[AnyContent] = authenticated(Roles.read).async { _ => + cortexActionSrv.getAction(actionId).map { action => renderer.toOutput(OK, action) } } diff --git a/thehive-cortex/app/connectors/cortex/controllers/ReportTemplateCtrl.scala b/thehive-cortex/app/connectors/cortex/controllers/ReportTemplateCtrl.scala index 8c36a0e14d..2a2affdd3c 100644 --- 
a/thehive-cortex/app/connectors/cortex/controllers/ReportTemplateCtrl.scala +++ b/thehive-cortex/app/connectors/cortex/controllers/ReportTemplateCtrl.scala @@ -38,50 +38,50 @@ class ReportTemplateCtrl @Inject()( private[ReportTemplateCtrl] lazy val logger = Logger(getClass) @Timed - def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def create: Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => reportTemplateSrv .create(request.body) - .map(reportTemplate ⇒ renderer.toOutput(CREATED, reportTemplate)) + .map(reportTemplate => renderer.toOutput(CREATED, reportTemplate)) } @Timed - def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def get(id: String): Action[AnyContent] = authenticated(Roles.read).async { _ => reportTemplateSrv .get(id) - .map(reportTemplate ⇒ renderer.toOutput(OK, reportTemplate)) + .map(reportTemplate => renderer.toOutput(OK, reportTemplate)) } @Timed - def getContent(analyzerId: String, reportType: String): Action[AnyContent] = authenticated(Roles.read).async { _ ⇒ + def getContent(analyzerId: String, reportType: String): Action[AnyContent] = authenticated(Roles.read).async { _ => import org.elastic4play.services.QueryDSL._ val (reportTemplates, total) = reportTemplateSrv.find(and("analyzerId" ~= analyzerId, "reportType" ~= reportType), Some("0-1"), Nil) - total.foreach { t ⇒ + total.foreach { t => if (t > 1) logger.warn(s"Multiple report templates match for analyzer $analyzerId with type $reportType") } reportTemplates .runWith(Sink.headOption) .map { - case Some(reportTemplate) ⇒ Ok(reportTemplate.content()).as("text/html") - case None ⇒ NotFound("") + case Some(reportTemplate) => Ok(reportTemplate.content()).as("text/html") + case None => NotFound("") } } @Timed - def update(id: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request ⇒ + def update(id: String): Action[Fields] = authenticated(Roles.admin).async(fieldsBodyParser) { implicit request => reportTemplateSrv .update(id, request.body) - .map(reportTemplate ⇒ renderer.toOutput(OK, reportTemplate)) + .map(reportTemplate => renderer.toOutput(OK, reportTemplate)) } @Timed - def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def delete(id: String): Action[AnyContent] = authenticated(Roles.admin).async { implicit request => reportTemplateSrv .delete(id) - .map(_ ⇒ NoContent) + .map(_ => NoContent) } @Timed - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -94,13 +94,13 @@ class ReportTemplateCtrl @Inject()( } @Timed - def importTemplatePackage: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒ + def importTemplatePackage: Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request => val zipFile = request.body.get("templates") match { - case Some(FileInputValue(_, filepath, _)) ⇒ new ZipFile(filepath.toFile) - case _ ⇒ throw BadRequestError("") + case Some(FileInputValue(_, filepath, _)) => new ZipFile(filepath.toFile) + case _ => throw BadRequestError("") } val importedReportTemplates: Seq[Future[(String, 
JsBoolean)]] = zipFile.getFileHeaders.asScala.filter(_ != null).collect { - case fileHeader: FileHeader if !fileHeader.isDirectory ⇒ + case fileHeader: FileHeader if !fileHeader.isDirectory => val Array(analyzerId, reportTypeHtml, _*) = (fileHeader.getFileName + "/").split("/", 3) val inputStream = zipFile.getInputStream(fileHeader) val content = Source.fromInputStream(inputStream).mkString @@ -116,20 +116,20 @@ class ReportTemplateCtrl @Inject()( reportTemplateSrv .create(reportTemplateFields) .recoverWith { // if creation fails, try to update - case NonFatal(_) ⇒ + case NonFatal(_) => val reportTemplateId = analyzerId + "_" + reportType reportTemplateSrv.update(reportTemplateId, Fields.empty.set("content", content)) } - .map(_.id → JsTrue) + .map(_.id -> JsTrue) .recoverWith { - case NonFatal(e) ⇒ + case NonFatal(e) => logger.error(s"The import of the report template $analyzerId ($reportType) has failed", e) val reportTemplateId = analyzerId + "_" + reportType - Future.successful(reportTemplateId → JsFalse) + Future.successful(reportTemplateId -> JsFalse) } } - Future.sequence(importedReportTemplates).map { result ⇒ + Future.sequence(importedReportTemplates).map { result => renderer.toOutput(OK, JsObject(result)) } } diff --git a/thehive-cortex/app/connectors/cortex/models/Action.scala b/thehive-cortex/app/connectors/cortex/models/Action.scala index b41a575919..1b74c08eee 100644 --- a/thehive-cortex/app/connectors/cortex/models/Action.scala +++ b/thehive-cortex/app/connectors/cortex/models/Action.scala @@ -7,12 +7,12 @@ import javax.inject.{Inject, Singleton} import scala.concurrent.Future import play.api.libs.json.JsObject import org.elastic4play.JsonFormat.dateFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, ModelDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, ModelDef, AttributeFormat => F, AttributeOption => O} import org.elastic4play.utils.RichJson import connectors.cortex.models.JsonFormat.jobStatusFormat import services.AuditedModel -trait ActionAttributes { _: AttributeDef ⇒ +trait ActionAttributes { _: AttributeDef => val responderId = attribute("responderId", F.stringFmt, "Analyzer", O.readonly) val responderName = optionalAttribute("responderName", F.stringFmt, "Name of the responder", O.readonly) val responderDefinition = optionalAttribute("responderDefinition", F.stringFmt, "Name of the responder definition", O.readonly) diff --git a/thehive-cortex/app/connectors/cortex/models/Job.scala b/thehive-cortex/app/connectors/cortex/models/Job.scala index 85ead46bf5..2809a8218f 100644 --- a/thehive-cortex/app/connectors/cortex/models/Job.scala +++ b/thehive-cortex/app/connectors/cortex/models/Job.scala @@ -12,7 +12,7 @@ import models.{Artifact, ArtifactModel} import services.AuditedModel import org.elastic4play.JsonFormat.dateFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F, AttributeOption => O} import org.elastic4play.utils.RichJson object JobStatus extends Enumeration with HiveEnumeration { @@ -20,7 +20,7 @@ object JobStatus extends Enumeration with HiveEnumeration { val InProgress, Success, Failure, Waiting = Value } -trait JobAttributes { _: AttributeDef ⇒ +trait JobAttributes { _: AttributeDef => val analyzerId = attribute("analyzerId", F.stringFmt, "Analyzer", 
O.readonly) val analyzerName = optionalAttribute("analyzerName", F.stringFmt, "Name of the analyzer", O.readonly) val analyzerDefinition = optionalAttribute("analyzerDefinition", F.stringFmt, "Name of the analyzer definition", O.readonly) @@ -51,13 +51,14 @@ object Job { def fixJobAttr(attr: JsObject): JsObject = { val analyzerId = (attr \ "analyzerId").as[String] - val attrWithAnalyzerName = (attr \ "analyzerName").asOpt[String].fold(attr + ("analyzerName" → JsString(analyzerId)))(_ ⇒ attr) - (attr \ "analyzerDefinition").asOpt[String].fold(attrWithAnalyzerName + ("analyzerDefinition" → JsString(analyzerId)))(_ ⇒ attrWithAnalyzerName) + val attrWithAnalyzerName = (attr \ "analyzerName").asOpt[String].fold(attr + ("analyzerName" -> JsString(analyzerId)))(_ => attr) + (attr \ "analyzerDefinition").asOpt[String].fold(attrWithAnalyzerName + ("analyzerDefinition" -> JsString(analyzerId)))(_ => attrWithAnalyzerName) } } class Job(model: JobModel, attributes: JsObject) extends EntityDef[JobModel, Job](model, Job.fixJobAttr(attributes)) with JobAttributes { - override def toJson: JsObject = super.toJson + ("report" → report().fold[JsValue](JsObject.empty)(r ⇒ Json.parse(r))) // FIXME is parse fails (invalid report) + override def toJson: JsObject = + super.toJson + ("report" -> report().fold[JsValue](JsObject.empty)(r => Json.parse(r))) // FIXME is parse fails (invalid report) } case class CortexJob( diff --git a/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala b/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala index a15f1dae70..2cbdff534f 100644 --- a/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala +++ b/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala @@ -9,65 +9,65 @@ import java.util.Date object JsonFormat { private val analyzerWrites = Writes[Analyzer]( - analyzer ⇒ + analyzer => Json.obj( - "id" → analyzer.id, - "name" → analyzer.name, - "version" → analyzer.version, - "description" → analyzer.description, - "dataTypeList" → analyzer.dataTypeList, - "cortexIds" → analyzer.cortexIds + "id" -> analyzer.id, + "name" -> analyzer.name, + "version" -> analyzer.version, + "description" -> analyzer.description, + "dataTypeList" -> analyzer.dataTypeList, + "cortexIds" -> analyzer.cortexIds ) ) private val analyzerReads = Reads[Analyzer]( - json ⇒ + json => for { - name ← (json \ "name").validate[String] - version ← (json \ "version").validate[String] + name <- (json \ "name").validate[String] + version <- (json \ "version").validate[String] definition = (name + "_" + version).replaceAll("\\.", "_") id = (json \ "id").asOpt[String].getOrElse(definition) renamed = if (id == definition) definition else name - description ← (json \ "description").validate[String] - dataTypeList ← (json \ "dataTypeList").validate[Seq[String]] + description <- (json \ "description").validate[String] + dataTypeList <- (json \ "dataTypeList").validate[Seq[String]] } yield Analyzer(id, renamed, version, description, dataTypeList) ) implicit val analyzerFormat: Format[Analyzer] = Format(analyzerReads, analyzerWrites) - private val fileArtifactWrites = OWrites[FileArtifact](fileArtifact ⇒ Json.obj("attributes" → fileArtifact.attributes)) + private val fileArtifactWrites = OWrites[FileArtifact](fileArtifact => Json.obj("attributes" -> fileArtifact.attributes)) private val fileArtifactReads = Reads[FileArtifact]( - json ⇒ - (json \ "attributes").validate[JsObject].map { attributes ⇒ + json => + (json \ "attributes").validate[JsObject].map { attributes => FileArtifact(Source.empty, 
attributes) } ) private val fileArtifactFormat = OFormat(fileArtifactReads, fileArtifactWrites) private val dataArtifactFormat = Json.format[DataArtifact] implicit val artifactReads: Reads[CortexArtifact] = Reads[CortexArtifact]( - json ⇒ + json => json.validate[JsObject].flatMap { - case a if a.keys.contains("data") ⇒ json.validate[DataArtifact](dataArtifactFormat) - case _ ⇒ json.validate[FileArtifact](fileArtifactFormat) + case a if a.keys.contains("data") => json.validate[DataArtifact](dataArtifactFormat) + case _ => json.validate[FileArtifact](fileArtifactFormat) } ) implicit def artifactWrites[A <: CortexArtifact]: OWrites[A] = OWrites[A] { - case dataArtifact: DataArtifact ⇒ dataArtifactFormat.writes(dataArtifact) - case fileArtifact: FileArtifact ⇒ fileArtifactWrites.writes(fileArtifact) + case dataArtifact: DataArtifact => dataArtifactFormat.writes(dataArtifact) + case fileArtifact: FileArtifact => fileArtifactWrites.writes(fileArtifact) } // implicit def artifactFormat[A <: CortexArtifact]: OFormat[A] = OFormat(artifactReads, artifactWrites) implicit val jobStatusFormat: Format[JobStatus.Type] = enumFormat(JobStatus) implicit val cortexJobReads: Reads[CortexJob] = Reads[CortexJob]( - json ⇒ + json => for { - id ← (json \ "id").validate[String] - analyzerId ← (json \ "workerId").orElse(json \ "analyzerId").validate[String] + id <- (json \ "id").validate[String] + analyzerId <- (json \ "workerId").orElse(json \ "analyzerId").validate[String] analyzerName = (json \ "workerName").orElse(json \ "analyzerName").validate[String].getOrElse(analyzerId) analyzerDefinition = (json \ "workerDefinitionId").orElse(json \ "analyzerDefinitionId").validate[String].getOrElse(analyzerId) - attributes = JsObject( - (json \ "tlp").asOpt[JsValue].map("tlp" -> _).toList ::: - (json \ "message").asOpt[JsValue].map("message" -> _).toList ::: + attributes = JsObject( + (json \ "tlp").asOpt[JsValue].map("tlp" -> _).toList ::: + (json \ "message").asOpt[JsValue].map("message" -> _).toList ::: (json \ "parameters").asOpt[JsValue].map("parameters" -> _).toList ) artifact = (json \ "artifact") @@ -78,38 +78,38 @@ object JsonFormat { .map(DataArtifact(_, attributes)) .getOrElse(FileArtifact(Source.empty, attributes)) } - date ← (json \ "date").validate[Date] - status ← (json \ "status").validate[JobStatus.Type] + date <- (json \ "date").validate[Date] + status <- (json \ "status").validate[JobStatus.Type] } yield CortexJob(id, analyzerId, analyzerName, analyzerDefinition, artifact, date, status) ) implicit val reportTypeFormat: Format[ReportType.Type] = enumFormat(ReportType) private val responderWrites = Writes[Responder]( - responder ⇒ + responder => Json.obj( - "id" → responder.id, - "name" → responder.name, - "version" → responder.version, - "description" → responder.description, - "dataTypeList" → responder.dataTypeList, - "maxTlp" → responder.maxTlp, - "maxPap" → responder.maxPap, - "cortexIds" → responder.cortexIds + "id" -> responder.id, + "name" -> responder.name, + "version" -> responder.version, + "description" -> responder.description, + "dataTypeList" -> responder.dataTypeList, + "maxTlp" -> responder.maxTlp, + "maxPap" -> responder.maxPap, + "cortexIds" -> responder.cortexIds ) ) private val responderReads = Reads[Responder]( - json ⇒ + json => for { - name ← (json \ "name").validate[String] - version ← (json \ "version").validate[String] + name <- (json \ "name").validate[String] + version <- (json \ "version").validate[String] definition = (name + "_" + version).replaceAll("\\.", "_") id = (json 
\ "id").asOpt[String].getOrElse(definition) renamed = if (id == definition) definition else name - description ← (json \ "description").validate[String] - dataTypeList ← (json \ "dataTypeList").validate[Seq[String]] - maxTlp ← (json \ "maxTlp").validateOpt[Long] - maxPap ← (json \ "maxPap").validateOpt[Long] + description <- (json \ "description").validate[String] + dataTypeList <- (json \ "dataTypeList").validate[Seq[String]] + maxTlp <- (json \ "maxTlp").validateOpt[Long] + maxPap <- (json \ "maxPap").validateOpt[Long] } yield Responder(id, renamed, version, description, dataTypeList, maxTlp, maxPap) ) implicit val responderFormat: Format[Responder] = Format(responderReads, responderWrites) diff --git a/thehive-cortex/app/connectors/cortex/models/ReportTemplate.scala b/thehive-cortex/app/connectors/cortex/models/ReportTemplate.scala index 271e885ba2..a6d139519c 100644 --- a/thehive-cortex/app/connectors/cortex/models/ReportTemplate.scala +++ b/thehive-cortex/app/connectors/cortex/models/ReportTemplate.scala @@ -4,7 +4,7 @@ import javax.inject.{Inject, Singleton} import play.api.libs.json.JsObject -import org.elastic4play.models.{AttributeDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, EntityDef, ModelDef} +import org.elastic4play.models.{AttributeDef, AttributeFormat => F, AttributeOption => O, EntityDef, ModelDef} import org.elastic4play.models.BaseEntity import play.api.libs.json.JsString import scala.concurrent.Future @@ -17,7 +17,7 @@ object ReportType extends Enumeration with HiveEnumeration { val short, long = Value } -trait ReportTemplateAttributes { _: AttributeDef ⇒ +trait ReportTemplateAttributes { _: AttributeDef => val reportTemplateId = attribute("_id", F.stringFmt, "Report template id", O.model) val content = attribute("content", F.textFmt, "Content of the template") val reportType = attribute("reportType", F.enumFmt(ReportType), "Type of the report (short or long)") @@ -30,10 +30,10 @@ class ReportTemplateModel @Inject() with ReportTemplateAttributes { override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = { val maybeId = for { - analyzerId ← (attrs \ "analyzerId").asOpt[String] - reportType ← (attrs \ "reportType").asOpt[String] + analyzerId <- (attrs \ "analyzerId").asOpt[String] + reportType <- (attrs \ "reportType").asOpt[String] } yield analyzerId + "_" + reportType - Future.successful(maybeId.fold(attrs)(id ⇒ attrs + ("_id" → JsString(id)))) + Future.successful(maybeId.fold(attrs)(id => attrs + ("_id" -> JsString(id)))) } } diff --git a/thehive-cortex/app/connectors/cortex/services/ActionOperation.scala b/thehive-cortex/app/connectors/cortex/services/ActionOperation.scala index b4445edbf8..2faad37b5c 100644 --- a/thehive-cortex/app/connectors/cortex/services/ActionOperation.scala +++ b/thehive-cortex/app/connectors/cortex/services/ActionOperation.scala @@ -99,61 +99,61 @@ case class AssignCase(owner: String, status: ActionOperationStatus.Type = Action } object ActionOperation { - val addTagToCaseWrites: OWrites[AddTagToCase] = Json.writes[AddTagToCase] - val addTagToArtifactWrites: OWrites[AddTagToArtifact] = Json.writes[AddTagToArtifact] - val createTaskWrites: OWrites[CreateTask] = Json.writes[CreateTask] - val addCustomFieldsWrites: OWrites[AddCustomFields] = Json.writes[AddCustomFields] - val closeTaskWrites: OWrites[CloseTask] = Json.writes[CloseTask] - val markAlertAsReadWrites: OWrites[MarkAlertAsRead] = Json.writes[MarkAlertAsRead] - val addLogToTaskWrites: OWrites[AddLogToTask] = Json.writes[AddLogToTask] - val 
addTagToAlertWrites: OWrites[AddTagToAlert] = Json.writes[AddTagToAlert] + val addTagToCaseWrites: OWrites[AddTagToCase] = Json.writes[AddTagToCase] + val addTagToArtifactWrites: OWrites[AddTagToArtifact] = Json.writes[AddTagToArtifact] + val createTaskWrites: OWrites[CreateTask] = Json.writes[CreateTask] + val addCustomFieldsWrites: OWrites[AddCustomFields] = Json.writes[AddCustomFields] + val closeTaskWrites: OWrites[CloseTask] = Json.writes[CloseTask] + val markAlertAsReadWrites: OWrites[MarkAlertAsRead] = Json.writes[MarkAlertAsRead] + val addLogToTaskWrites: OWrites[AddLogToTask] = Json.writes[AddLogToTask] + val addTagToAlertWrites: OWrites[AddTagToAlert] = Json.writes[AddTagToAlert] val addArtifactToCaseWrites: OWrites[AddArtifactToCase] = Json.writes[AddArtifactToCase] - val assignCaseWrites: OWrites[AssignCase] = Json.writes[AssignCase] + val assignCaseWrites: OWrites[AssignCase] = Json.writes[AssignCase] implicit val actionOperationReads: Reads[ActionOperation] = Reads[ActionOperation]( - json ⇒ + json => (json \ "type").asOpt[String].fold[JsResult[ActionOperation]](JsError("type is missing in action operation")) { - case "AddTagToCase" ⇒ (json \ "tag").validate[String].map(tag ⇒ AddTagToCase(tag)) - case "AddTagToArtifact" ⇒ (json \ "tag").validate[String].map(tag ⇒ AddTagToArtifact(tag)) - case "CreateTask" ⇒ JsSuccess(CreateTask(json.as[JsObject] - "type")) - case "AddCustomFields" ⇒ + case "AddTagToCase" => (json \ "tag").validate[String].map(tag => AddTagToCase(tag)) + case "AddTagToArtifact" => (json \ "tag").validate[String].map(tag => AddTagToArtifact(tag)) + case "CreateTask" => JsSuccess(CreateTask(json.as[JsObject] - "type")) + case "AddCustomFields" => for { - name ← (json \ "name").validate[String] - tpe ← (json \ "tpe").validate[String] - value ← (json \ "value").validate[JsValue] + name <- (json \ "name").validate[String] + tpe <- (json \ "tpe").validate[String] + value <- (json \ "value").validate[JsValue] } yield AddCustomFields(name, tpe, value) - case "CloseTask" ⇒ JsSuccess(CloseTask()) - case "MarkAlertAsRead" ⇒ JsSuccess(MarkAlertAsRead()) - case "AddLogToTask" ⇒ + case "CloseTask" => JsSuccess(CloseTask()) + case "MarkAlertAsRead" => JsSuccess(MarkAlertAsRead()) + case "AddLogToTask" => for { - content ← (json \ "content").validate[String] - owner ← (json \ "owner").validateOpt[String] + content <- (json \ "content").validate[String] + owner <- (json \ "owner").validateOpt[String] } yield AddLogToTask(content, owner) - case "AddArtifactToCase" ⇒ + case "AddArtifactToCase" => for { - data ← (json \ "data").validate[String] - dataType ← (json \ "dataType").validate[String] - dataMessage ← (json \ "message").validate[String] + data <- (json \ "data").validate[String] + dataType <- (json \ "dataType").validate[String] + dataMessage <- (json \ "message").validate[String] } yield AddArtifactToCase(data, dataType, dataMessage) - case "AssignCase" ⇒ + case "AssignCase" => for { - owner ← (json \ "owner").validate[String] + owner <- (json \ "owner").validate[String] } yield AssignCase(owner) - case "AddTagToAlert" ⇒ (json \ "tag").validate[String].map(tag ⇒ AddTagToAlert(tag)) - case other ⇒ JsError(s"Unknown operation $other") + case "AddTagToAlert" => (json \ "tag").validate[String].map(tag => AddTagToAlert(tag)) + case other => JsError(s"Unknown operation $other") } ) implicit val actionOperationWrites: Writes[ActionOperation] = Writes[ActionOperation] { - case a: AddTagToCase ⇒ addTagToCaseWrites.writes(a) - case a: AddTagToArtifact ⇒ 
addTagToArtifactWrites.writes(a) - case a: CreateTask ⇒ createTaskWrites.writes(a) - case a: AddCustomFields ⇒ addCustomFieldsWrites.writes(a) - case a: CloseTask ⇒ closeTaskWrites.writes(a) - case a: MarkAlertAsRead ⇒ markAlertAsReadWrites.writes(a) - case a: AddLogToTask ⇒ addLogToTaskWrites.writes(a) - case a: AddTagToAlert ⇒ addTagToAlertWrites.writes(a) - case a: AddArtifactToCase ⇒ addArtifactToCaseWrites.writes(a) - case a: AssignCase ⇒ assignCaseWrites.writes(a) - case a ⇒ Json.obj("unsupported operation" → a.toString) + case a: AddTagToCase => addTagToCaseWrites.writes(a) + case a: AddTagToArtifact => addTagToArtifactWrites.writes(a) + case a: CreateTask => createTaskWrites.writes(a) + case a: AddCustomFields => addCustomFieldsWrites.writes(a) + case a: CloseTask => closeTaskWrites.writes(a) + case a: MarkAlertAsRead => markAlertAsReadWrites.writes(a) + case a: AddLogToTask => addLogToTaskWrites.writes(a) + case a: AddTagToAlert => addTagToAlertWrites.writes(a) + case a: AddArtifactToCase => addArtifactToCaseWrites.writes(a) + case a: AssignCase => assignCaseWrites.writes(a) + case a => Json.obj("unsupported operation" -> a.toString) } } @@ -170,16 +170,16 @@ class ActionOperationSrv @Inject()( implicit val mat: Materializer ) { - lazy val logger: Logger = Logger(getClass) + lazy val logger: Logger = Logger(getClass) lazy val alertSrv: AlertSrv = alertSrvProvider.get def findCaseEntity(entity: BaseEntity): Future[Case] = { import org.elastic4play.services.QueryDSL._ (entity, entity.model) match { - case (c: Case, _) ⇒ Future.successful(c) - case (a: Alert, _) ⇒ a.caze().fold(Future.failed[Case](BadRequestError("Alert hasn't been imported to case")))(caseSrv.get) - case (_, model: ChildModelDef[_, _, _, _]) ⇒ + case (c: Case, _) => Future.successful(c) + case (a: Alert, _) => a.caze().fold(Future.failed[Case](BadRequestError("Alert hasn't been imported to case")))(caseSrv.get) + case (_, model: ChildModelDef[_, _, _, _]) => findSrv( model.parentModel, "_id" ~= entity.parentId.getOrElse(throw InternalError(s"Child entity $entity has no parent ID")), @@ -188,7 +188,7 @@ class ActionOperationSrv @Inject()( )._1 .runWith(Sink.head) .flatMap(findCaseEntity _) - case _ ⇒ Future.failed(BadRequestError("Case not found")) + case _ => Future.failed(BadRequestError("Case not found")) } } @@ -196,8 +196,8 @@ class ActionOperationSrv @Inject()( import org.elastic4play.services.QueryDSL._ (entity, entity.model) match { - case (a: Task, _) ⇒ Future.successful(a) - case (_, model: ChildModelDef[_, _, _, _]) ⇒ + case (a: Task, _) => Future.successful(a) + case (_, model: ChildModelDef[_, _, _, _]) => findSrv( model.parentModel, "_id" ~= entity.parentId.getOrElse(throw InternalError(s"Child entity $entity has no parent ID")), @@ -206,7 +206,7 @@ class ActionOperationSrv @Inject()( )._1 .runWith(Sink.head) .flatMap(findTaskEntity _) - case _ ⇒ Future.failed(BadRequestError("Task not found")) + case _ => Future.failed(BadRequestError("Task not found")) } } @@ -214,8 +214,8 @@ class ActionOperationSrv @Inject()( import org.elastic4play.services.QueryDSL._ (entity, entity.model) match { - case (a: Artifact, _) ⇒ Future.successful(a) - case (_, model: ChildModelDef[_, _, _, _]) ⇒ + case (a: Artifact, _) => Future.successful(a) + case (_, model: ChildModelDef[_, _, _, _]) => findSrv( model.parentModel, "_id" ~= entity.parentId.getOrElse(throw InternalError(s"Child entity $entity has no parent ID")), @@ -224,7 +224,7 @@ class ActionOperationSrv @Inject()( )._1 .runWith(Sink.head) 
.flatMap(findArtifactEntity _) - case _ ⇒ Future.failed(BadRequestError("Artifact not found")) + case _ => Future.failed(BadRequestError("Artifact not found")) } } @@ -232,93 +232,97 @@ class ActionOperationSrv @Inject()( if (operation.status == ActionOperationStatus.Waiting) { Retry()(classOf[ConflictError]) { operation match { - case AddTagToCase(tag, _, _) ⇒ + case AddTagToCase(tag, _, _) => for { - initialCase ← findCaseEntity(entity) - caze ← caseSrv.get(initialCase.id) - _ ← caseSrv.update( + initialCase <- findCaseEntity(entity) + caze <- caseSrv.get(initialCase.id) + _ <- caseSrv.update( caze, Fields.empty.set("tags", Json.toJson((caze.tags() :+ tag).distinct)), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(caze.seqNo -> caze.primaryTerm)) ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case AddTagToArtifact(tag, _, _) ⇒ + case AddTagToArtifact(tag, _, _) => entity match { - case initialArtifact: Artifact ⇒ + case initialArtifact: Artifact => for { - artifact ← artifactSrv.get(initialArtifact.artifactId()) - _ ← artifactSrv.update( + artifact <- artifactSrv.get(initialArtifact.artifactId()) + _ <- artifactSrv.update( artifact.artifactId(), Fields.empty.set("tags", Json.toJson((artifact.tags() :+ tag).distinct)), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(artifact.seqNo -> artifact.primaryTerm)) ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case _ ⇒ Future.failed(BadRequestError("Artifact not found")) + case _ => Future.failed(BadRequestError("Artifact not found")) } - case CreateTask(fields, _, _) ⇒ + case CreateTask(fields, _, _) => for { - caze ← findCaseEntity(entity) - _ ← taskSrv.create(caze, Fields(fields)) + caze <- findCaseEntity(entity) + _ <- taskSrv.create(caze, Fields(fields)) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case AddCustomFields(name, tpe, value, _, _) ⇒ + case AddCustomFields(name, tpe, value, _, _) => for { - initialCase ← findCaseEntity(entity) - caze ← caseSrv.get(initialCase.id) - customFields = caze.customFields().asOpt[JsObject].getOrElse(JsObject.empty) ++ Json.obj(name → Json.obj(tpe → value)) - _ ← caseSrv.update( + initialCase <- findCaseEntity(entity) + caze <- caseSrv.get(initialCase.id) + customFields = caze.customFields().asOpt[JsObject].getOrElse(JsObject.empty) ++ Json.obj(name -> Json.obj(tpe -> value)) + _ <- caseSrv.update( caze, Fields.empty.set("customFields", customFields), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(caze.seqNo -> caze.primaryTerm)) ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case CloseTask(_, _) ⇒ + case CloseTask(_, _) => for { - initialTask ← findTaskEntity(entity) - task ← taskSrv.get(initialTask.id) - _ ← taskSrv.update( + initialTask <- findTaskEntity(entity) + task <- taskSrv.get(initialTask.id) + _ <- taskSrv.update( task, Fields.empty.set("status", TaskStatus.Completed.toString).set("flag", JsFalse), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(task.seqNo -> task.primaryTerm)) ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case MarkAlertAsRead(_, _) ⇒ + case MarkAlertAsRead(_, _) => entity match { - case alert: Alert ⇒ alertSrv.markAsRead(alert).map(_ ⇒ operation.updateStatus(ActionOperationStatus.Success, "")) - case _ ⇒ Future.failed(BadRequestError("Alert not found")) + case alert: Alert => alertSrv.markAsRead(alert).map(_ => operation.updateStatus(ActionOperationStatus.Success, "")) + case _ => 
Future.failed(BadRequestError("Alert not found")) } - case AddLogToTask(content, owner, _, _) ⇒ + case AddLogToTask(content, owner, _, _) => for { - task ← findTaskEntity(entity) - _ ← logSrv.create(task, Fields.empty.set("message", content).set("owner", owner.map(JsString))) + task <- findTaskEntity(entity) + _ <- logSrv.create(task, Fields.empty.set("message", content).set("owner", owner.map(JsString))) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case AddArtifactToCase(data, dataType, dataMessage, _, _) ⇒ + case AddArtifactToCase(data, dataType, dataMessage, _, _) => for { - initialCase ← findCaseEntity(entity) - _ ← artifactSrv.create(initialCase.id, Fields.empty.set("data", data).set("dataType", dataType).set("message", dataMessage)) + initialCase <- findCaseEntity(entity) + _ <- artifactSrv.create(initialCase.id, Fields.empty.set("data", data).set("dataType", dataType).set("message", dataMessage)) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case AssignCase(owner, _, _) ⇒ + case AssignCase(owner, _, _) => for { - initialCase ← findCaseEntity(entity) - caze ← caseSrv.get(initialCase.id) - _ ← caseSrv.update(caze, Fields.empty.set("owner", owner), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(caze.seqNo -> caze.primaryTerm))) + initialCase <- findCaseEntity(entity) + caze <- caseSrv.get(initialCase.id) + _ <- caseSrv.update( + caze, + Fields.empty.set("owner", owner), + ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(caze.seqNo -> caze.primaryTerm)) + ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case AddTagToAlert(tag, _, _) ⇒ + case AddTagToAlert(tag, _, _) => entity match { - case initialAlert: Alert ⇒ + case initialAlert: Alert => for { - alert ← alertSrv.get(initialAlert.id) - _ ← alertSrv.update( + alert <- alertSrv.get(initialAlert.id) + _ <- alertSrv.update( alert.id, Fields.empty.set("tags", Json.toJson((alert.tags() :+ tag).distinct)), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(alert.seqNo -> alert.primaryTerm)) ) } yield operation.updateStatus(ActionOperationStatus.Success, "") - case _ ⇒ Future.failed(BadRequestError("Alert not found")) + case _ => Future.failed(BadRequestError("Alert not found")) } - case o ⇒ Future.successful(operation.updateStatus(ActionOperationStatus.Failure, s"Operation $o not supported")) + case o => Future.successful(operation.updateStatus(ActionOperationStatus.Failure, s"Operation $o not supported")) } }.recover { - case error ⇒ + case error => logger.error("Operation execution fails", error) operation.updateStatus(ActionOperationStatus.Failure, error.getMessage) } diff --git a/thehive-cortex/app/connectors/cortex/services/CortexActionSrv.scala b/thehive-cortex/app/connectors/cortex/services/CortexActionSrv.scala index cc0a5e7db0..1a71a21bbf 100644 --- a/thehive-cortex/app/connectors/cortex/services/CortexActionSrv.scala +++ b/thehive-cortex/app/connectors/cortex/services/CortexActionSrv.scala @@ -24,7 +24,7 @@ import services.UserSrv import org.elastic4play.controllers.Fields import org.elastic4play.database.ModifyConfig import org.elastic4play.models.{AttributeOption, BaseEntity} -import org.elastic4play.services.{User ⇒ _, _} +import org.elastic4play.services.{User => _, _} import org.elastic4play.{BadRequestError, MissingAttributeError, NotFoundError} @Singleton @@ -48,25 +48,25 @@ class CortexActionSrv @Inject()( def getResponderById(id: String): Future[Responder] = id match { - case responderIdRegex(instanceId, responderId) ⇒ + 
case responderIdRegex(instanceId, responderId) => cortexConfig .instances .find(_.name == instanceId) .map(_.getResponderById(responderId)) .getOrElse(Future.failed(NotFoundError(s"Responder $id not found"))) - case _ ⇒ Future.firstCompletedOf(cortexConfig.instances.map(_.getResponderById(id))) + case _ => Future.firstCompletedOf(cortexConfig.instances.map(_.getResponderById(id))) } - def askRespondersOnAllCortex(f: CortexClient ⇒ Future[Seq[Responder]]): Future[Seq[Responder]] = + def askRespondersOnAllCortex(f: CortexClient => Future[Seq[Responder]]): Future[Seq[Responder]] = Future - .traverse(cortexConfig.instances) { cortex ⇒ - f(cortex).recover { case NonFatal(t) ⇒ logger.error("Request to Cortex fails", t); Nil } + .traverse(cortexConfig.instances) { cortex => + f(cortex).recover { case NonFatal(t) => logger.error("Request to Cortex fails", t); Nil } } .map(_.flatten) def findResponders(query: JsObject): Future[Seq[Responder]] = askRespondersOnAllCortex(_.findResponders(query)) - .map { responders ⇒ + .map { responders => responders .groupBy(_.name) .values @@ -76,20 +76,20 @@ class CortexActionSrv @Inject()( def findResponderFor(entityType: String, entityId: String): Future[Seq[Responder]] = for { - entity ← getEntity(entityType, entityId) - artifactTlp ← actionOperationSrv + entity <- getEntity(entityType, entityId) + artifactTlp <- actionOperationSrv .findArtifactEntity(entity) - .map(a ⇒ Some(a.tlp())) - .recover { case _ ⇒ None } - (tlp, pap) ← actionOperationSrv + .map(a => Some(a.tlp())) + .recover { case _ => None } + (tlp, pap) <- actionOperationSrv .findCaseEntity(entity) - .map { caze ⇒ + .map { caze => (artifactTlp.getOrElse(caze.tlp()), caze.pap()) } - .recover { case _ ⇒ (artifactTlp.getOrElse(0L), 0L) } - query = Json.obj("dataTypeList" → s"thehive:$entityType") - responders ← findResponders(query) - applicableResponders = responders.filter(w ⇒ w.maxTlp.fold(true)(_ >= tlp) && w.maxPap.fold(true)(_ >= pap)) + .recover { case _ => (artifactTlp.getOrElse(0L), 0L) } + query = Json.obj("dataTypeList" -> s"thehive:$entityType") + responders <- findResponders(query) + applicableResponders = responders.filter(w => w.maxTlp.fold(true)(_ >= tlp) && w.maxPap.fold(true)(_ >= pap)) } yield applicableResponders def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String]): (Source[Action, NotUsed], Future[Long]) = @@ -104,7 +104,7 @@ class CortexActionSrv @Inject()( update(actionId, fields, ModifyConfig.default) private def update(actionId: String, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext): Future[Action] = - getAction(actionId).flatMap(action ⇒ update(action, fields, modifyConfig)) + getAction(actionId).flatMap(action => update(action, fields, modifyConfig)) private def update(action: Action, fields: Fields)(implicit authContext: AuthContext): Future[Action] = update(action, fields, ModifyConfig.default) @@ -123,7 +123,7 @@ class CortexActionSrv @Inject()( logger.debug(s"Requesting status of job $cortexJobId in cortex ${cortex.name} in order to update action $actionId") cortex .waitReport(cortexJobId, retryDelay) - .flatMap { j ⇒ + .flatMap { j => val status = (j \ "status").asOpt[JobStatus.Type].getOrElse(JobStatus.Failure) if (status == JobStatus.InProgress || status == JobStatus.Waiting) updateActionWithCortex(actionId, cortexJobId, entity, cortex) @@ -132,9 +132,9 @@ class CortexActionSrv @Inject()( val operations = (report \ "operations").asOpt[Seq[ActionOperation]].getOrElse(Nil) logger.debug(s"Job $cortexJobId in cortex 
${cortex.name} has finished with status $status, updating action $actionId") val updatedAction = for { - action ← getSrv[ActionModel, Action](actionModel, actionId) - updatedOperations ← Future.traverse(operations) { op ⇒ - userSrv.extraAuthContext(ac ⇒ actionOperationSrv.execute(entity, op)(ac)) + action <- getSrv[ActionModel, Action](actionModel, actionId) + updatedOperations <- Future.traverse(operations) { op => + userSrv.extraAuthContext(ac => actionOperationSrv.execute(entity, op)(ac)) } actionFields = Fields .empty @@ -142,28 +142,28 @@ class CortexActionSrv @Inject()( .set("report", (report - "operations").toString) .set("endDate", Json.toJson(new Date)) .set("operations", Json.toJson(updatedOperations).toString) - updatedAction ← update(action, actionFields) + updatedAction <- update(action, actionFields) } yield updatedAction updatedAction.failed.foreach(logger.error(s"Update action fails", _)) updatedAction } } .recoverWith { - case CortexError(404, _, _) ⇒ + case CortexError(404, _, _) => logger.debug(s"The job $cortexJobId not found") val actionFields = Fields .empty .set("status", JobStatus.Failure.toString) .set("endDate", Json.toJson(new Date)) update(actionId, actionFields) - case _ if maxRetryOnError > 0 ⇒ + case _ if maxRetryOnError > 0 => logger.debug(s"Request of status of job $cortexJobId in cortex ${cortex.name} fails, restarting ...") val result = Promise[Action] system.scheduler.scheduleOnce(retryDelay) { updateActionWithCortex(actionId, cortexJobId, entity, cortex, retryDelay, maxRetryOnError - 1).onComplete(result.complete) } result.future - case _ ⇒ + case _ => logger.error(s"Request of status of job $cortexJobId in cortex ${cortex.name} fails and the number of errors reaches the limit, aborting") update( actionId, @@ -183,25 +183,25 @@ class CortexActionSrv @Inject()( ._1 .runWith(Sink.headOption) .flatMap { - case Some(entity) ⇒ Future.successful(entity) - case None ⇒ Future.failed(NotFoundError(s"$objectType $objectId not found")) + case Some(entity) => Future.successful(entity) + case None => Future.failed(NotFoundError(s"$objectType $objectId not found")) } } def getEntityLabel(entity: BaseEntity): String = entity match { - case c: Case ⇒ s"#${c.caseId()} ${c.title()}" - case a: Artifact ⇒ s"[${a.dataType()}] ${a.data().getOrElse(a.attachment().get.name)}" - case a: Alert ⇒ s"[${a.source()}:${a.sourceRef()}] ${a.title()}" - case l: Log ⇒ s"${l.message()} from ${l.createdBy}" - case t: Task ⇒ s"${t.title()} (${t.status()})" - case j: Job ⇒ s"${j.analyzerName()} (${j.status()})" - case a: Action ⇒ s"${a.responderName()} on ${a.objectType()}:${a.objectId()}" - case u: User ⇒ s"${u.userName()} (${u.userId()})" - case a: Audit ⇒ s"${a.operation()} on ${a.objectType()}:${a.objectId()}" - case ct: CaseTemplate ⇒ s"${ct.templateName()}" - case d: Dashboard ⇒ s"${d.title()}" - case rt: ReportTemplate ⇒ s"${rt.analyzerId()}/${rt.reportType()}" + case c: Case => s"#${c.caseId()} ${c.title()}" + case a: Artifact => s"[${a.dataType()}] ${a.data().getOrElse(a.attachment().get.name)}" + case a: Alert => s"[${a.source()}:${a.sourceRef()}] ${a.title()}" + case l: Log => s"${l.message()} from ${l.createdBy}" + case t: Task => s"${t.title()} (${t.status()})" + case j: Job => s"${j.analyzerName()} (${j.status()})" + case a: Action => s"${a.responderName()} on ${a.objectType()}:${a.objectId()}" + case u: User => s"${u.userName()} (${u.userId()})" + case a: Audit => s"${a.operation()} on ${a.objectType()}:${a.objectId()}" + case ct: CaseTemplate => s"${ct.templateName()}" + case 
d: Dashboard => s"${d.title()}" + case rt: ReportTemplate => s"${rt.analyzerId()}/${rt.reportType()}" } def executeAction(fields: Fields)(implicit authContext: AuthContext): Future[Action] = { @@ -213,24 +213,24 @@ class CortexActionSrv @Inject()( def getCortexClient: Future[(CortexClient, Responder)] = fields .getString("cortexId") - .map { cortexId ⇒ + .map { cortexId => cortexConfig .instances .find(_.name == cortexId) - .fold[Future[(CortexClient, Responder)]](Future.failed(NotFoundError(s"cortex $cortexId not found"))) { c ⇒ - getResponder(c).map(c → _) + .fold[Future[(CortexClient, Responder)]](Future.failed(NotFoundError(s"cortex $cortexId not found"))) { c => + getResponder(c).map(c -> _) } } .getOrElse { Future - .traverse(cortexConfig.instances) { c ⇒ + .traverse(cortexConfig.instances) { c => getResponder(c) .transform { - case Success(w) ⇒ Success(Some(c → w)) - case _ ⇒ Success(None) + case Success(w) => Success(Some(c -> w)) + case _ => Success(None) } } - .flatMap { responders ⇒ + .flatMap { responders => responders .flatten .headOption @@ -239,20 +239,20 @@ class CortexActionSrv @Inject()( } for { - objectType ← fields.getString("objectType").fold[Future[String]](Future.failed(MissingAttributeError("action.objectType")))(Future.successful) - objectId ← fields.getString("objectId").fold[Future[String]](Future.failed(MissingAttributeError("action.objectId")))(Future.successful) - (cortexClient, responder) ← getCortexClient + objectType <- fields.getString("objectType").fold[Future[String]](Future.failed(MissingAttributeError("action.objectType")))(Future.successful) + objectId <- fields.getString("objectId").fold[Future[String]](Future.failed(MissingAttributeError("action.objectId")))(Future.successful) + (cortexClient, responder) <- getCortexClient message = fields.getString("message").getOrElse("") parameters = fields.getValue("parameters") match { - case Some(o: JsObject) ⇒ o - case _ ⇒ JsObject.empty + case Some(o: JsObject) => o + case _ => JsObject.empty } - entity ← getEntity(objectType, objectId) - entityJson ← auxSrv(entity, 10, withStats = false, !_.contains(AttributeOption.sensitive)) - caze ← actionOperationSrv.findCaseEntity(entity).map(Some(_)).recover { case _ ⇒ None } + entity <- getEntity(objectType, objectId) + entityJson <- auxSrv(entity, 10, withStats = false, !_.contains(AttributeOption.sensitive)) + caze <- actionOperationSrv.findCaseEntity(entity).map(Some(_)).recover { case _ => None } tlp = fields.getLong("tlp").orElse(caze.map(_.tlp())).getOrElse(2L) pap = caze.map(_.pap()).getOrElse(2L) - jobJson ← cortexClient.execute( + jobJson <- cortexClient.execute( responder.id, getEntityLabel(entity), s"thehive:$objectType", @@ -260,10 +260,10 @@ class CortexActionSrv @Inject()( tlp, pap, message, - parameters + ("user" → JsString(authContext.userId)) + parameters + ("user" -> JsString(authContext.userId)) ) job = jobJson.as[CortexJob] //(cortexActionJobReads(cortexClient.name)) - action ← createSrv[ActionModel, Action]( + action <- createSrv[ActionModel, Action]( actionModel, Fields .empty @@ -279,7 +279,7 @@ class CortexActionSrv @Inject()( .set("cortexId", cortexClient.name) .set("cortexJobId", job.id) ) - _ = userSrv.extraAuthContext { extraAuthContext ⇒ + _ = userSrv.extraAuthContext { extraAuthContext => updateActionWithCortex(action.id, job.id, entity, cortexClient)(extraAuthContext) } } yield action diff --git a/thehive-cortex/app/connectors/cortex/services/CortexAnalyzerSrv.scala b/thehive-cortex/app/connectors/cortex/services/CortexAnalyzerSrv.scala 
index 5418088461..476297887a 100644 --- a/thehive-cortex/app/connectors/cortex/services/CortexAnalyzerSrv.scala +++ b/thehive-cortex/app/connectors/cortex/services/CortexAnalyzerSrv.scala @@ -18,7 +18,7 @@ import connectors.cortex.models.JsonFormat._ import connectors.cortex.models._ import javax.inject.{Inject, Singleton} import models.{Artifact, Case} -import services.{UserSrv ⇒ _, _} +import services.{UserSrv => _, _} import org.elastic4play.controllers.{Fields, FileInputValue} import org.elastic4play.database.{DBRemove, ModifyConfig} import org.elastic4play.services.JsonFormat.attachmentFormat @@ -43,25 +43,25 @@ class JobReplicateActor @Inject()(cortexSrv: CortexAnalyzerSrv, eventSrv: EventS } override def receive: Receive = { - case MergeArtifact(newArtifact, artifacts, authContext) ⇒ + case MergeArtifact(newArtifact, artifacts, authContext) => logger.info(s"Merging jobs from artifacts ${artifacts.map(_.id)} into artifact ${newArtifact.id}") import org.elastic4play.services.QueryDSL._ cortexSrv .find(and(parent("case_artifact", withId(artifacts.map(_.id): _*)), "status" ~= JobStatus.Success), Some("all"), Nil) ._1 - .mapAsyncUnordered(5) { job ⇒ + .mapAsyncUnordered(5) { job => val baseFields = Fields( - job.attributes - "_id" - "_routing" - "_parent" - "_type" - "_seqNo" - "_primaryTerm" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user" + job.attributes - "_id" - "_routing" - "_parent" - "_type" - "_seqNo" - "_primaryTerm" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user" ) val createdJob = cortexSrv.create(newArtifact, baseFields)(authContext) createdJob .failed - .foreach(error ⇒ logger.error(s"Fail to create job under artifact ${newArtifact.id}\n\tjob attributes: $baseFields", error)) + .foreach(error => logger.error(s"Fail to create job under artifact ${newArtifact.id}\n\tjob attributes: $baseFields", error)) createdJob } .runWith(Sink.ignore) () - case RemoveJobsOf(artifactId) ⇒ + case RemoveJobsOf(artifactId) => import org.elastic4play.services.QueryDSL._ cortexSrv .find(withParent("case_artifact", artifactId), Some("all"), Nil) @@ -93,17 +93,17 @@ class CortexAnalyzerSrv @Inject()( private[CortexAnalyzerSrv] lazy val logger = Logger(getClass) - userSrv.inInitAuthContext { implicit authContext ⇒ + userSrv.inInitAuthContext { implicit authContext => import org.elastic4play.services.QueryDSL._ logger.info(s"Search for unfinished job ...") val (jobs, total) = find("status" ~= "InProgress", Some("all"), Nil) - total.foreach(t ⇒ logger.info(s"$t jobs found")) + total.foreach(t => logger.info(s"$t jobs found")) jobs - .runForeach { job ⇒ + .runForeach { job => logger.info(s"Found job in progress, request its status to Cortex") (for { - cortexJobId ← job.cortexJobId() - cortexClient ← cortexConfig.instances.find(c ⇒ job.cortexId().contains(c.name)) + cortexJobId <- job.cortexJobId() + cortexClient <- cortexConfig.instances.find(c => job.cortexId().contains(c.name)) } yield updateJobWithCortex(job.id, cortexJobId, cortexClient)) .getOrElse { val jobFields = Fields @@ -122,7 +122,7 @@ class CortexAnalyzerSrv @Inject()( update(jobId, fields, ModifyConfig.default) private[CortexAnalyzerSrv] def update(jobId: String, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext): Future[Job] = - getJob(jobId).flatMap(job ⇒ update(job, fields, modifyConfig)) + getJob(jobId).flatMap(job => update(job, fields, modifyConfig)) private[CortexAnalyzerSrv] def update(job: Job, fields: Fields)(implicit authContext: AuthContext): Future[Job] = 
update(job, fields, ModifyConfig.default) @@ -134,56 +134,56 @@ class CortexAnalyzerSrv @Inject()( findSrv[JobModel, Job](jobModel, queryDef, range, sortBy) def realDeleteJob(job: Job): Future[Unit] = - dbRemove(job).map(_ ⇒ ()) + dbRemove(job).map(_ => ()) def stats(query: QueryDef, aggs: Seq[Agg]): Future[JsObject] = findSrv(jobModel, query, aggs: _*) def getAnalyzer(analyzerId: String): Future[Analyzer] = Future - .traverse(cortexConfig.instances) { cortex ⇒ + .traverse(cortexConfig.instances) { cortex => cortex.getAnalyzer(analyzerId).map(Some(_)).fallbackTo(Future.successful(None)) } - .map { analyzers ⇒ + .map { analyzers => analyzers .foldLeft[Option[Analyzer]](None) { - case (Some(analyzer1), Some(analyzer2)) ⇒ Some(analyzer1.copy(cortexIds = analyzer1.cortexIds ++ analyzer2.cortexIds)) - case (maybeAnalyzer1, maybeAnalyzer2) ⇒ maybeAnalyzer1 orElse maybeAnalyzer2 + case (Some(analyzer1), Some(analyzer2)) => Some(analyzer1.copy(cortexIds = analyzer1.cortexIds ++ analyzer2.cortexIds)) + case (maybeAnalyzer1, maybeAnalyzer2) => maybeAnalyzer1 orElse maybeAnalyzer2 } .getOrElse(throw NotFoundError(s"Analyzer $analyzerId not found")) } - def askAnalyzersOnAllCortex(f: CortexClient ⇒ Future[Seq[Analyzer]]): Future[Seq[Analyzer]] = + def askAnalyzersOnAllCortex(f: CortexClient => Future[Seq[Analyzer]]): Future[Seq[Analyzer]] = Future - .traverse(cortexConfig.instances) { cortex ⇒ - f(cortex).recover { case NonFatal(t) ⇒ logger.error("Request to Cortex fails", t); Nil } + .traverse(cortexConfig.instances) { cortex => + f(cortex).recover { case NonFatal(t) => logger.error("Request to Cortex fails", t); Nil } } .map(_.flatten) def getAnalyzersFor(dataType: String): Future[Seq[Analyzer]] = Future - .traverse(cortexConfig.instances) { cortex ⇒ - cortex.listAnalyzerForType(dataType).recover { case NonFatal(t) ⇒ logger.error("Request to Cortex fails", t); Nil } + .traverse(cortexConfig.instances) { cortex => + cortex.listAnalyzerForType(dataType).recover { case NonFatal(t) => logger.error("Request to Cortex fails", t); Nil } } - .map { listOfListOfAnalyzers ⇒ + .map { listOfListOfAnalyzers => val analysers = listOfListOfAnalyzers.flatten analysers .groupBy(_.name) .values - .map(_.reduce((a1, a2) ⇒ a1.copy(cortexIds = a1.cortexIds ::: a2.cortexIds))) + .map(_.reduce((a1, a2) => a1.copy(cortexIds = a1.cortexIds ::: a2.cortexIds))) .toSeq } def listAnalyzer: Future[Seq[Analyzer]] = Future - .traverse(cortexConfig.instances) { cortex ⇒ - cortex.listAnalyzer.recover { case NonFatal(t) ⇒ logger.error("Request to Cortex fails", t); Nil } + .traverse(cortexConfig.instances) { cortex => + cortex.listAnalyzer.recover { case NonFatal(t) => logger.error("Request to Cortex fails", t); Nil } } - .map { listOfListOfAnalyzers ⇒ + .map { listOfListOfAnalyzers => val analysers = listOfListOfAnalyzers.flatten analysers .groupBy(_.name) .values - .map(_.reduceLeft((a1, a2) ⇒ a1.copy(cortexIds = a1.cortexIds ::: a2.cortexIds))) + .map(_.reduceLeft((a1, a2) => a1.copy(cortexIds = a1.cortexIds ::: a2.cortexIds))) .toSeq } @@ -202,26 +202,26 @@ class CortexAnalyzerSrv @Inject()( .find(and(criteria: _*), Some("0-1"), Nil) ._1 .runWith(Sink.headOption) - .map(_.fold[JsValue](JsNull)(a ⇒ JsString(a.id))) - .recover { case _ ⇒ JsNull } + .map(_.fold[JsValue](JsNull)(a => JsString(a.id))) + .recover { case _ => JsNull } } for { - caze ← caseSrv.find(child("case_artifact", withId((job \ "_parent").as[String])), Some("0-1"), Nil)._1.runWith(Sink.headOption) - updatedReport ← (job \ "report") + caze <- 
caseSrv.find(child("case_artifact", withId((job \ "_parent").as[String])), Some("0-1"), Nil)._1.runWith(Sink.headOption) + updatedReport <- (job \ "report") .asOpt[JsObject] - .map { report ⇒ + .map { report => val artifacts = for { - artifact ← (report \ "artifacts").asOpt[Seq[JsObject]].getOrElse(Nil) - dataType ← (artifact \ "dataType").asOpt[String] + artifact <- (report \ "artifacts").asOpt[Seq[JsObject]].getOrElse(Nil) + dataType <- (artifact \ "dataType").asOpt[String] data = (artifact \ "data").asOpt[String] attachmentId = (artifact \ "attachment" \ "id").asOpt[String] foundArtifactId = findArtifactId(caze.get, dataType, data, attachmentId) - } yield foundArtifactId.map(faid ⇒ artifact + ("id" → faid)) - Future.sequence(artifacts).map(a ⇒ report + ("artifacts" → JsArray(a))) + } yield foundArtifactId.map(faid => artifact + ("id" -> faid)) + Future.sequence(artifacts).map(a => report + ("artifacts" -> JsArray(a))) } .getOrElse(Future.successful(JsObject.empty)) - } yield job + ("report" → updatedReport) + } yield job + ("report" -> updatedReport) } def updateJobWithCortex( @@ -235,20 +235,20 @@ class CortexAnalyzerSrv @Inject()( def updateArtifactSummary(job: Job, report: JsObject): Future[Unit] = (report \ "summary") .asOpt[JsObject] - .map { jobSummary ⇒ + .map { jobSummary => Retry()(classOf[Exception]) { for { - artifact ← artifactSrv.get(job.artifactId()) + artifact <- artifactSrv.get(job.artifactId()) reports = Try(Json.parse(artifact.reports()).asOpt[JsObject]).toOption.flatten.getOrElse(JsObject.empty) - newReports = reports + (job.analyzerDefinition().getOrElse(job.analyzerId()) → jobSummary) - _ ← artifactSrv.update( + newReports = reports + (job.analyzerDefinition().getOrElse(job.analyzerId()) -> jobSummary) + _ <- artifactSrv.update( job.artifactId(), Fields.empty.set("reports", newReports.toString), ModifyConfig(retryOnConflict = 0, seqNoAndPrimaryTerm = Some(artifact.seqNo -> artifact.primaryTerm)) ) } yield () }.recover { - case NonFatal(t) ⇒ logger.warn(s"Unable to insert summary report in artifact", t) + case NonFatal(t) => logger.warn(s"Unable to insert summary report in artifact", t) } } .getOrElse(Future.successful(())) @@ -262,17 +262,17 @@ class CortexAnalyzerSrv @Inject()( ) cortex .getAttachment(id) - .flatMap(src ⇒ src.runWith(FileIO.toPath(file))) - .flatMap(_ ⇒ attachmentSrv.save(fiv)) - .andThen { case _ ⇒ Files.delete(file) } - .map(a ⇒ Some(artifact + ("attachment" → Json.toJson(a)))) - .recover { case _ ⇒ None } + .flatMap(src => src.runWith(FileIO.toPath(file))) + .flatMap(_ => attachmentSrv.save(fiv)) + .andThen { case _ => Files.delete(file) } + .map(a => Some(artifact + ("attachment" -> Json.toJson(a)))) + .recover { case _ => None } } logger.debug(s"Requesting status of job $cortexJobId in cortex ${cortex.name} in order to update job $jobId") cortex .waitReport(cortexJobId, retryDelay) - .flatMap { j ⇒ + .flatMap { j => val status = (j \ "status").asOpt[JobStatus.Type].getOrElse(JobStatus.Failure) if (status == JobStatus.InProgress || status == JobStatus.Waiting) updateJobWithCortex(jobId, cortexJobId, cortex) @@ -285,41 +285,41 @@ class CortexAnalyzerSrv @Inject()( (report \ "artifacts") .asOpt[Seq[JsObject]] .getOrElse(Nil) - ) { artifact ⇒ + ) { artifact => (artifact \ "dataType") .asOpt[String] .flatMap { - case "file" ⇒ - (artifact \ "attachment" \ "id").asOpt[String].map { id ⇒ + case "file" => + (artifact \ "attachment" \ "id").asOpt[String].map { id => downloadAndSaveAttachment(artifact, id) .andThen { - case attachmentArtifact ⇒ 
logger.debug(s"Download attachment $artifact => $attachmentArtifact") + case attachmentArtifact => logger.debug(s"Download attachment $artifact => $attachmentArtifact") } } - case _ ⇒ Some(Future.successful(Some(artifact))) + case _ => Some(Future.successful(Some(artifact))) } .getOrElse(Future.successful(None)) } - .map(a ⇒ report + ("artifacts" → JsArray(a.flatten))) + .map(a => report + ("artifacts" -> JsArray(a.flatten))) val updatedJob = for { - job ← getSrv[JobModel, Job](jobModel, jobId) - newReport ← reportWithDownloadedArtifacts + job <- getSrv[JobModel, Job](jobModel, jobId) + newReport <- reportWithDownloadedArtifacts jobFields = Fields .empty .set("status", status.toString) .set("report", newReport.toString) .set("endDate", Json.toJson(new Date)) - updatedJob ← update(job, jobFields) - _ ← if (status == JobStatus.Success) updateArtifactSummary(job, report) else Future.successful(()) + updatedJob <- update(job, jobFields) + _ <- if (status == JobStatus.Success) updateArtifactSummary(job, report) else Future.successful(()) } yield updatedJob updatedJob.failed.foreach(logger.error(s"Update job fails", _)) updatedJob } } .recoverWith { - case CortexError(404, _, _) ⇒ + case CortexError(404, _, _) => logger.debug(s"The job $cortexJobId not found") val jobFields = Fields .empty @@ -327,17 +327,17 @@ class CortexAnalyzerSrv @Inject()( .set("endDate", Json.toJson(new Date)) update(jobId, jobFields) /* Workaround */ - case CortexError(500, _, body) if Try((Json.parse(body) \ "type").as[String]) == Success("akka.pattern.AskTimeoutException") ⇒ + case CortexError(500, _, body) if Try((Json.parse(body) \ "type").as[String]) == Success("akka.pattern.AskTimeoutException") => logger.debug("Got a 500 Timeout, retry") updateJobWithCortex(jobId, cortexJobId, cortex) - case e if maxRetryOnError > 0 ⇒ + case e if maxRetryOnError > 0 => logger.debug(s"Request of status of job $cortexJobId in cortex ${cortex.name} fails, restarting ...", e) val result = Promise[Job] system.scheduler.scheduleOnce(retryDelay) { updateJobWithCortex(jobId, cortexJobId, cortex, retryDelay, maxRetryOnError - 1).onComplete(result.complete) } result.future - case e ⇒ + case e => logger.error( s"Request of status of job $cortexJobId in cortex ${cortex.name} fails and the number of errors reaches the limit, aborting", e @@ -354,25 +354,25 @@ class CortexAnalyzerSrv @Inject()( def submitJob(cortexId: Option[String], analyzerName: String, artifactId: String)(implicit authContext: AuthContext): Future[Job] = { val cortexClientAnalyzer = cortexId match { - case Some(id) ⇒ + case Some(id) => cortexConfig .instances .find(_.name == id) - .fold[Future[(CortexClient, Analyzer)]](Future.failed(NotFoundError(s"cortex $id not found"))) { c ⇒ + .fold[Future[(CortexClient, Analyzer)]](Future.failed(NotFoundError(s"cortex $id not found"))) { c => c.getAnalyzer(analyzerName) - .map(c → _) + .map(c -> _) } - case None ⇒ + case None => Future - .traverse(cortexConfig.instances) { c ⇒ + .traverse(cortexConfig.instances) { c => c.getAnalyzer(analyzerName) .transform { - case Success(w) ⇒ Success(Some(c → w)) - case _ ⇒ Success(None) + case Success(w) => Success(Some(c -> w)) + case _ => Success(None) } } - .flatMap { analyzers ⇒ + .flatMap { analyzers => analyzers .flatten .headOption @@ -381,25 +381,25 @@ class CortexAnalyzerSrv @Inject()( } cortexClientAnalyzer.flatMap { - case (cortex, analyzer) ⇒ + case (cortex, analyzer) => for { - artifact ← artifactSrv.get(artifactId) - caze ← caseSrv.get(artifact.parentId.get) + artifact <- 
artifactSrv.get(artifactId) + caze <- caseSrv.get(artifact.parentId.get) artifactAttributes = Json.obj( - "tlp" → artifact.tlp(), - "pap" → caze.pap(), - "dataType" → artifact.dataType(), - "message" → caze.caseId().toString + "tlp" -> artifact.tlp(), + "pap" -> caze.pap(), + "dataType" -> artifact.dataType(), + "message" -> caze.caseId().toString ) cortexArtifact = (artifact.data(), artifact.attachment()) match { - case (Some(data), None) ⇒ DataArtifact(data, artifactAttributes) - case (None, Some(attachment)) ⇒ - FileArtifact(attachmentSrv.source(attachment.id), artifactAttributes + ("attachment" → Json.toJson(attachment))) - case _ ⇒ throw InternalError(s"Artifact has invalid data : ${artifact.attributes}") + case (Some(data), None) => DataArtifact(data, artifactAttributes) + case (None, Some(attachment)) => + FileArtifact(attachmentSrv.source(attachment.id), artifactAttributes + ("attachment" -> Json.toJson(attachment))) + case _ => throw InternalError(s"Artifact has invalid data : ${artifact.attributes}") } - cortexJobJson ← cortex.analyze(analyzer.id, cortexArtifact) + cortexJobJson <- cortex.analyze(analyzer.id, cortexArtifact) cortexJob = cortexJobJson.as[CortexJob] - job ← create( + job <- create( artifact, Fields .empty diff --git a/thehive-cortex/app/connectors/cortex/services/CortexClient.scala b/thehive-cortex/app/connectors/cortex/services/CortexClient.scala index a5a5b7ed9e..224dcc8f1f 100644 --- a/thehive-cortex/app/connectors/cortex/services/CortexClient.scala +++ b/thehive-cortex/app/connectors/cortex/services/CortexClient.scala @@ -30,10 +30,10 @@ object CortexConfig { .map(CortexAuthentication.Key) .orElse { for { - basicEnabled ← configuration.getOptional[Boolean]("basicAuth") + basicEnabled <- configuration.getOptional[Boolean]("basicAuth") if basicEnabled - username ← configuration.getOptional[String]("username") - password ← configuration.getOptional[String]("password") + username <- configuration.getOptional[String]("username") + password <- configuration.getOptional[String]("password") } yield CortexAuthentication.Basic(username, password) } Some(new CortexClient(name, url, authentication, ws)) @@ -41,13 +41,13 @@ object CortexConfig { def getInstances(configuration: Configuration, globalWS: CustomWSAPI): Seq[CortexClient] = for { - cfg ← configuration.getOptional[Configuration]("cortex").toSeq + cfg <- configuration.getOptional[Configuration]("cortex").toSeq cortexWS = globalWS.withConfig(cfg) - key ← cfg.subKeys + key <- cfg.subKeys if key != "ws" - c ← Try(cfg.get[Configuration](key)).toOption + c <- Try(cfg.get[Configuration](key)).toOption instanceWS = cortexWS.withConfig(c) - cic ← getCortexClient(key, c, instanceWS) + cic <- getCortexClient(key, c, instanceWS) } yield cic } @@ -81,7 +81,7 @@ object CortexAuthentication { val name = "key" def apply(request: WSRequest): WSRequest = - request.withHttpHeaders(HeaderNames.AUTHORIZATION → s"Bearer $key") + request.withHttpHeaders(HeaderNames.AUTHORIZATION -> s"Bearer $key") } } @@ -92,42 +92,42 @@ class CortexClient(val name: String, baseUrl: String, authentication: Option[Cor private[CortexClient] lazy val logger = Logger(getClass) logger.info(s"new Cortex($name, $baseUrl) authentication: ${authentication.fold("no")(_.getClass.getName)}") - private def request[A](uri: String, f: WSRequest ⇒ Future[WSResponse], t: WSResponse ⇒ A)(implicit ec: ExecutionContext): Future[A] = { + private def request[A](uri: String, f: WSRequest => Future[WSResponse], t: WSResponse => A)(implicit ec: ExecutionContext): Future[A] = { 
val request = ws.url(s"$baseUrl/$uri") val authenticatedRequest = authentication.fold(request)(_.apply(request)) f(authenticatedRequest).map { - case response if response.status / 100 == 2 ⇒ t(response) - case error ⇒ throw CortexError(error.status, s"$baseUrl/$uri", error.body) + case response if response.status / 100 == 2 => t(response) + case error => throw CortexError(error.status, s"$baseUrl/$uri", error.body) } } def getAnalyzer(analyzerId: String)(implicit ec: ExecutionContext): Future[Analyzer] = request(s"api/analyzer/$analyzerId", _.get, _.json.as[Analyzer]) .map(_.copy(cortexIds = List(name))) - .recoverWith { case _ ⇒ getAnalyzerByName(analyzerId) } // if get analyzer using cortex2 API fails, try using legacy API + .recoverWith { case _ => getAnalyzerByName(analyzerId) } // if get analyzer using cortex2 API fails, try using legacy API def getResponderById(responderId: String)(implicit ec: ExecutionContext): Future[Responder] = request(s"api/responder/$responderId", _.get, _.json.as[Responder]).map(_.addCortexId(name)) def getResponderByName(responderName: String)(implicit ec: ExecutionContext): Future[Responder] = { - val searchRequest = Json.obj("query" → Json.obj("_field" → "name", "_value" → responderName), "range" → "0-1") + val searchRequest = Json.obj("query" -> Json.obj("_field" -> "name", "_value" -> responderName), "range" -> "0-1") request(s"api/responder/_search", _.post(searchRequest), _.json.as[Seq[Responder]]) - .flatMap { analyzers ⇒ + .flatMap { analyzers => analyzers .headOption - .fold[Future[Responder]](Future.failed(NotFoundError(s"responder $responderName not found"))) { responder ⇒ + .fold[Future[Responder]](Future.failed(NotFoundError(s"responder $responderName not found"))) { responder => Future.successful(responder.addCortexId(name)) } } } def getAnalyzerByName(analyzerName: String)(implicit ec: ExecutionContext): Future[Analyzer] = { - val searchRequest = Json.obj("query" → Json.obj("_field" → "name", "_value" → analyzerName), "range" → "0-1") + val searchRequest = Json.obj("query" -> Json.obj("_field" -> "name", "_value" -> analyzerName), "range" -> "0-1") request(s"api/analyzer/_search", _.post(searchRequest), _.json.as[Seq[Analyzer]]) - .flatMap { analyzers ⇒ + .flatMap { analyzers => analyzers .headOption - .fold[Future[Analyzer]](Future.failed(NotFoundError(s"analyzer $analyzerName not found"))) { analyzer ⇒ + .fold[Future[Analyzer]](Future.failed(NotFoundError(s"analyzer $analyzerName not found"))) { analyzer => Future.successful(analyzer.copy(cortexIds = List(name))) } } @@ -137,11 +137,11 @@ class CortexClient(val name: String, baseUrl: String, authentication: Option[Cor request(s"api/analyzer?range=all", _.get, _.json.as[Seq[Analyzer]]).map(_.map(_.copy(cortexIds = List(name)))) def findResponders(query: JsObject)(implicit ec: ExecutionContext): Future[Seq[Responder]] = - request(s"api/responder/_search?range=all", _.post(Json.obj("query" → query)), _.json.as[Seq[Responder]]).map(_.map(_.addCortexId(name))) + request(s"api/responder/_search?range=all", _.post(Json.obj("query" -> query)), _.json.as[Seq[Responder]]).map(_.map(_.addCortexId(name))) def analyze(analyzerId: String, artifact: CortexArtifact)(implicit ec: ExecutionContext): Future[JsValue] = artifact match { - case FileArtifact(data, attributes) ⇒ + case FileArtifact(data, attributes) => val body = Source( List( FilePart("data", (attributes \ "attachment" \ "name").asOpt[String].getOrElse("noname"), None, data), @@ -149,7 +149,7 @@ class CortexClient(val name: String, baseUrl: 
String, authentication: Option[Cor ) ) request(s"api/analyzer/$analyzerId/run", _.post(body), _.json) - case a: DataArtifact ⇒ + case a: DataArtifact => request(s"api/analyzer/$analyzerId/run", _.post(Json.toJson(a)), _.json.as[JsObject]) } @@ -157,7 +157,7 @@ class CortexClient(val name: String, baseUrl: String, authentication: Option[Cor implicit ec: ExecutionContext ): Future[JsValue] = { val body = - Json.obj("label" → label, "data" → data, "dataType" → dataType, "tlp" → tlp, "pap" → pap, "message" → message, "parameters" → parameters) + Json.obj("label" -> label, "data" -> data, "dataType" -> dataType, "tlp" -> tlp, "pap" -> pap, "message" -> message, "parameters" -> parameters) request(s"api/responder/$responderId/run", _.post(body), _.json.as[JsObject]) } @@ -167,43 +167,43 @@ class CortexClient(val name: String, baseUrl: String, authentication: Option[Cor def waitReport(jobId: String, atMost: Duration)(implicit ec: ExecutionContext): Future[JsObject] = request( s"api/job/$jobId/waitreport", - _.withQueryStringParameters("atMost" → atMost.toString).withRequestTimeout(atMost + 1.second).get, + _.withQueryStringParameters("atMost" -> atMost.toString).withRequestTimeout(atMost + 1.second).get, _.json.as[JsObject] ) def getVersion()(implicit ec: ExecutionContext): Future[Option[String]] = request("api/status", _.get, identity) .map { - case resp if resp.status / 100 == 2 ⇒ (resp.json \ "versions" \ "Cortex").asOpt[String] - case _ ⇒ None + case resp if resp.status / 100 == 2 => (resp.json \ "versions" \ "Cortex").asOpt[String] + case _ => None } - .recover { case _ ⇒ None } + .recover { case _ => None } def getCurrentUser()(implicit ec: ExecutionContext): Future[Option[String]] = request("api/user/current", _.get, identity) .map { - case resp if resp.status / 100 == 2 ⇒ (resp.json \ "id").asOpt[String] - case _ ⇒ None + case resp if resp.status / 100 == 2 => (resp.json \ "id").asOpt[String] + case _ => None } - .recover { case _ ⇒ None } + .recover { case _ => None } def status()(implicit ec: ExecutionContext): Future[JsObject] = for { - version ← getVersion() + version <- getVersion() versionValue = version.getOrElse("") - currentUser ← getCurrentUser() + currentUser <- getCurrentUser() status = if (version.isDefined && currentUser.isDefined) "OK" else if (version.isDefined) "AUTH_ERROR" else "ERROR" } yield { - Json.obj("name" → name, "version" → versionValue, "status" → status) + Json.obj("name" -> name, "version" -> versionValue, "status" -> status) } def health()(implicit ec: ExecutionContext): Future[HealthStatus.Type] = getVersion() .map { - case None ⇒ HealthStatus.Error - case _ ⇒ HealthStatus.Ok + case None => HealthStatus.Error + case _ => HealthStatus.Ok } def getAttachment(id: String)(implicit ec: ExecutionContext): Future[Source[ByteString, _]] = diff --git a/thehive-misp/app/connectors/misp/JsonFormat.scala b/thehive-misp/app/connectors/misp/JsonFormat.scala index ab45c69a81..68dce582dc 100644 --- a/thehive-misp/app/connectors/misp/JsonFormat.scala +++ b/thehive-misp/app/connectors/misp/JsonFormat.scala @@ -10,30 +10,30 @@ import org.elastic4play.services.JsonFormat.attachmentFormat object JsonFormat { - implicit val mispAlertReads: Reads[MispAlert] = Reads[MispAlert] { json ⇒ + implicit val mispAlertReads: Reads[MispAlert] = Reads[MispAlert] { json => for { - org ← (json \ "Orgc" \ "name").validate[String] - info ← (json \ "info").validate[String] - eventId ← (json \ "id").validate[String] - optTags ← (json \ "EventTag").validateOpt[Seq[JsValue]] - tags = 
optTags.toSeq.flatten.flatMap(t ⇒ (t \ "Tag" \ "name").asOpt[String]) + org <- (json \ "Orgc" \ "name").validate[String] + info <- (json \ "info").validate[String] + eventId <- (json \ "id").validate[String] + optTags <- (json \ "EventTag").validateOpt[Seq[JsValue]] + tags = optTags.toSeq.flatten.flatMap(t => (t \ "Tag" \ "name").asOpt[String]) tlp = tags .map(_.toLowerCase) .collectFirst { - case "tlp:white" ⇒ 0L - case "tlp:green" ⇒ 1L - case "tlp:amber" ⇒ 2L - case "tlp:red" ⇒ 3L + case "tlp:white" => 0L + case "tlp:green" => 1L + case "tlp:amber" => 2L + case "tlp:red" => 3L } .getOrElse(2L) alertTags = s"src:$org" +: tags.filterNot(_.toLowerCase.startsWith("tlp:")) - timestamp ← (json \ "timestamp").validate[String] + timestamp <- (json \ "timestamp").validate[String] date = new Date(timestamp.toLong * 1000) - publishTimestamp ← (json \ "publish_timestamp").validate[String] + publishTimestamp <- (json \ "publish_timestamp").validate[String] publishDate = new Date(publishTimestamp.toLong * 1000) - threatLevelString ← (json \ "threat_level_id").validate[String] + threatLevelString <- (json \ "threat_level_id").validate[String] threatLevel = threatLevelString.toLong - isPublished ← (json \ "published").validate[Boolean] + isPublished <- (json \ "published").validate[Boolean] extendsUuid = (json \ "extends_uuid").asOpt[String] } yield MispAlert( org, @@ -55,52 +55,52 @@ object JsonFormat { Json.writes[MispAlert].transform((_: JsValue).asInstanceOf[JsObject] - "isPublished" - "extendsUuid") implicit val attributeReads: Reads[MispAttribute] = Reads( - json ⇒ + json => for { - id ← (json \ "id").validate[String] - tpe ← (json \ "type").validate[String] - timestamp ← (json \ "timestamp").validate[String] + id <- (json \ "id").validate[String] + tpe <- (json \ "type").validate[String] + timestamp <- (json \ "timestamp").validate[String] date = new Date(timestamp.toLong * 1000) - comment ← (json \ "comment").validate[String].orElse(JsSuccess("")) - value ← (json \ "value").validate[String] - category ← (json \ "category").validate[String] - tags ← JsArray(json \ "EventTag" \\ "name").validate[Seq[String]] - toIds ← (json \ "to_ids").validate[Boolean] + comment <- (json \ "comment").validate[String].orElse(JsSuccess("")) + value <- (json \ "value").validate[String] + category <- (json \ "category").validate[String] + tags <- JsArray(json \ "EventTag" \\ "name").validate[Seq[String]] + toIds <- (json \ "to_ids").validate[Boolean] } yield MispAttribute(id, category, tpe, date, comment, value, tags, toIds) ) val tlpWrites: Writes[Long] = Writes[Long] { - case 0 ⇒ JsString("tlp:white") - case 1 ⇒ JsString("tlp:green") - case 2 ⇒ JsString("tlp:amber") - case 3 ⇒ JsString("tlp:red") - case _ ⇒ JsString("tlp:amber") + case 0 => JsString("tlp:white") + case 1 => JsString("tlp:green") + case 2 => JsString("tlp:amber") + case 3 => JsString("tlp:red") + case _ => JsString("tlp:amber") } - implicit val exportedAttributeWrites: Writes[ExportedMispAttribute] = Writes[ExportedMispAttribute] { attribute ⇒ + implicit val exportedAttributeWrites: Writes[ExportedMispAttribute] = Writes[ExportedMispAttribute] { attribute => Json.obj( - "category" → attribute.category, - "type" → attribute.tpe, - "value" → attribute.value.fold[String](identity, _.name), - "comment" → attribute.comment, - "Tag" → Json.arr(Json.obj("name" → tlpWrites.writes(attribute.tlp))), - "to_ids" → attribute.artifact.ioc() + "category" -> attribute.category, + "type" -> attribute.tpe, + "value" -> attribute.value.fold[String](identity, _.name), 
+ "comment" -> attribute.comment, + "Tag" -> Json.arr(Json.obj("name" -> tlpWrites.writes(attribute.tlp))), + "to_ids" -> attribute.artifact.ioc() ) } - implicit val mispArtifactWrites: Writes[MispArtifact] = OWrites[MispArtifact] { artifact ⇒ + implicit val mispArtifactWrites: Writes[MispArtifact] = OWrites[MispArtifact] { artifact => Json.obj( - "dataType" → artifact.dataType, - "message" → artifact.message, - "tlp" → artifact.tlp, - "tags" → artifact.tags, - "startDate" → artifact.startDate, - "ioc" → artifact.ioc + "dataType" -> artifact.dataType, + "message" -> artifact.message, + "tlp" -> artifact.tlp, + "tags" -> artifact.tags, + "startDate" -> artifact.startDate, + "ioc" -> artifact.ioc ) + (artifact.value match { - case SimpleArtifactData(data) ⇒ "data" → JsString(data) - case RemoteAttachmentArtifact(filename, reference, tpe) ⇒ - "remoteAttachment" → Json.obj("filename" → filename, "reference" → reference, "type" → tpe) - case AttachmentArtifact(attachment) ⇒ "attachment" → Json.toJson(attachment) + case SimpleArtifactData(data) => "data" -> JsString(data) + case RemoteAttachmentArtifact(filename, reference, tpe) => + "remoteAttachment" -> Json.obj("filename" -> filename, "reference" -> reference, "type" -> tpe) + case AttachmentArtifact(attachment) => "attachment" -> Json.toJson(attachment) }) } } diff --git a/thehive-misp/app/connectors/misp/MispConfig.scala b/thehive-misp/app/connectors/misp/MispConfig.scala index 31745af075..2cd090ebf9 100644 --- a/thehive-misp/app/connectors/misp/MispConfig.scala +++ b/thehive-misp/app/connectors/misp/MispConfig.scala @@ -16,15 +16,15 @@ class MispConfig(val interval: FiniteDuration, val connections: Seq[MispConnecti this( configuration.getOptional[FiniteDuration]("misp.interval").getOrElse(1.hour), for { - cfg ← configuration.getOptional[Configuration]("misp").toSeq + cfg <- configuration.getOptional[Configuration]("misp").toSeq mispWS = globalWS.withConfig(cfg) defaultArtifactTags = cfg.getOptional[Seq[String]]("tags").getOrElse(Nil) - name ← cfg.subKeys + name <- cfg.subKeys - mispConnectionConfig ← Try(cfg.get[Configuration](name)).toOption.toSeq - url ← mispConnectionConfig.getOptional[String]("url") - key ← mispConnectionConfig.getOptional[String]("key") + mispConnectionConfig <- Try(cfg.get[Configuration](name)).toOption.toSeq + url <- mispConnectionConfig.getOptional[String]("url") + key <- mispConnectionConfig.getOptional[String]("key") instanceWS = mispWS.withConfig(mispConnectionConfig) artifactTags = mispConnectionConfig.getOptional[Seq[String]]("tags").getOrElse(defaultArtifactTags) caseTemplate = mispConnectionConfig.getOptional[String]("caseTemplate").orElse(defaultCaseTemplate) @@ -36,7 +36,7 @@ class MispConfig(val interval: FiniteDuration, val connections: Seq[MispConnecti whitelistTags = mispConnectionConfig.getOptional[Seq[String]]("whitelist.tags").fold(Set.empty[String])(_.toSet) purpose = mispConnectionConfig .getOptional[String]("purpose") - .fold(MispPurpose.ImportAndExport) { purposeName ⇒ + .fold(MispPurpose.ImportAndExport) { purposeName => Try(MispPurpose.withName(purposeName)).getOrElse { Logger(getClass).error( s"Incorrect value for MISP purpose ($name.purpose), one of (${MispPurpose.values.mkString(", ")}) was expected. 
Using default value: ImportAndExport " diff --git a/thehive-misp/app/connectors/misp/MispConnection.scala b/thehive-misp/app/connectors/misp/MispConnection.scala index 486b6cb3cd..6cf5ce2364 100644 --- a/thehive-misp/app/connectors/misp/MispConnection.scala +++ b/thehive-misp/app/connectors/misp/MispConnection.scala @@ -51,16 +51,16 @@ case class MispConnection( private[misp] def apply(url: String): WSRequest = ws.url(s"$baseUrl/$url") - .withHttpHeaders("Authorization" → key, "Accept" → "application/json") + .withHttpHeaders("Authorization" -> key, "Accept" -> "application/json") val (canImport, canExport) = purpose match { - case MispPurpose.ImportAndExport ⇒ (true, true) - case MispPurpose.ImportOnly ⇒ (true, false) - case MispPurpose.ExportOnly ⇒ (false, true) + case MispPurpose.ImportAndExport => (true, true) + case MispPurpose.ImportOnly => (true, false) + case MispPurpose.ExportOnly => (false, true) } def syncFrom(date: Date): Date = - maxAge.fold(date) { age ⇒ + maxAge.fold(date) { age => val now = new Date val dateThreshold = new Date(now.getTime - age.toMillis) @@ -87,22 +87,22 @@ case class MispConnection( apply("servers/getVersion") .get .map { - case resp if resp.status / 100 == 2 ⇒ (resp.json \ "version").asOpt[String] - case _ ⇒ None + case resp if resp.status / 100 == 2 => (resp.json \ "version").asOpt[String] + case _ => None } - .recover { case _ ⇒ None } + .recover { case _ => None } def status()(implicit ec: ExecutionContext): Future[JsObject] = getVersion() .map { - case Some(version) ⇒ Json.obj("name" → name, "version" → version, "status" → "OK", "url" → baseUrl, "purpose" → purpose.toString) - case None ⇒ Json.obj("name" → name, "version" → "", "status" → "ERROR", "url" → baseUrl, "purpose" → purpose.toString) + case Some(version) => Json.obj("name" -> name, "version" -> version, "status" -> "OK", "url" -> baseUrl, "purpose" -> purpose.toString) + case None => Json.obj("name" -> name, "version" -> "", "status" -> "ERROR", "url" -> baseUrl, "purpose" -> purpose.toString) } def healthStatus()(implicit ec: ExecutionContext): Future[HealthStatus.Type] = getVersion() .map { - case None ⇒ HealthStatus.Error - case _ ⇒ HealthStatus.Ok + case None => HealthStatus.Error + case _ => HealthStatus.Ok } } diff --git a/thehive-misp/app/connectors/misp/MispConnector.scala b/thehive-misp/app/connectors/misp/MispConnector.scala index 466ed199dd..b80cf9f1ce 100644 --- a/thehive-misp/app/connectors/misp/MispConnector.scala +++ b/thehive-misp/app/connectors/misp/MispConnector.scala @@ -15,7 +15,7 @@ class MispConnector extends ConnectorModule with AkkaGuiceSupport { bindActor[UpdateMispAlertArtifactActor]("UpdateMispAlertArtifactActor") registerController[MispCtrl] } catch { - case t: Throwable ⇒ logger.error("MISP connector is disabled because its configuration is invalid", t) + case t: Throwable => logger.error("MISP connector is disabled because its configuration is invalid", t) } } } diff --git a/thehive-misp/app/connectors/misp/MispConverter.scala b/thehive-misp/app/connectors/misp/MispConverter.scala index 88d4e0e2cd..19465317a6 100644 --- a/thehive-misp/app/connectors/misp/MispConverter.scala +++ b/thehive-misp/app/connectors/misp/MispConverter.scala @@ -35,11 +35,11 @@ trait MispConverter { val typesValues = types.zipAll(values, "noType", "noValue") val additionnalMessage = typesValues .map { - case (t, v) ⇒ s"$t: $v" + case (t, v) => s"$t: $v" } .mkString("\n") typesValues.map { - case (tpe, value) ⇒ + case (tpe, value) => artifact.copy(dataType = toArtifact(tpe), value = 
SimpleArtifactData(value), message = mispAttribute.comment + "\n" + additionnalMessage) } } else { @@ -50,170 +50,170 @@ trait MispConverter { def fromArtifact(dataType: String, data: Option[String]): (String, String) = dataType match { - case "filename" ⇒ "Payload delivery" → "filename" - case "fqdn" ⇒ "Network activity" → "hostname" - case "url" ⇒ "Network activity" → "url" - case "user-agent" ⇒ "Network activity" → "user-agent" - case "domain" ⇒ "Network activity" → "domain" - case "ip" ⇒ "Network activity" → "ip-src" - case "mail_subject" ⇒ "Payload delivery" → "email-subject" - case "hash" ⇒ + case "filename" => "Payload delivery" -> "filename" + case "fqdn" => "Network activity" -> "hostname" + case "url" => "Network activity" -> "url" + case "user-agent" => "Network activity" -> "user-agent" + case "domain" => "Network activity" -> "domain" + case "ip" => "Network activity" -> "ip-src" + case "mail_subject" => "Payload delivery" -> "email-subject" + case "hash" => data.fold(0)(_.length) match { - case 32 ⇒ "Payload delivery" → "md5" - case 40 ⇒ "Payload delivery" → "sha1" - case 64 ⇒ "Payload delivery" → "sha256" - case 56 ⇒ "Payload delivery" → "sha224" - case 71 ⇒ "Payload delivery" → "sha384" - case 128 ⇒ "Payload delivery" → "sha512" - case _ ⇒ "Payload delivery" → "other" + case 32 => "Payload delivery" -> "md5" + case 40 => "Payload delivery" -> "sha1" + case 64 => "Payload delivery" -> "sha256" + case 56 => "Payload delivery" -> "sha224" + case 71 => "Payload delivery" -> "sha384" + case 128 => "Payload delivery" -> "sha512" + case _ => "Payload delivery" -> "other" } - case "mail" ⇒ "Payload delivery" → "email-src" - case "registry" ⇒ "Persistence mechanism" → "regkey" - case "uri_path" ⇒ "Network activity" → "uri" - case "regexp" ⇒ "Other" → "other" - case "other" ⇒ "Other" → "other" - case "file" ⇒ "Payload delivery" → "malware-sample" - case _ ⇒ "Other" → "other" + case "mail" => "Payload delivery" -> "email-src" + case "registry" => "Persistence mechanism" -> "regkey" + case "uri_path" => "Network activity" -> "uri" + case "regexp" => "Other" -> "other" + case "other" => "Other" -> "other" + case "file" => "Payload delivery" -> "malware-sample" + case _ => "Other" -> "other" } def toArtifact(tpe: String): String = attribute2artifactLookup.getOrElse(tpe, "other") private lazy val attribute2artifactLookup = Map( - "md5" → "hash", - "sha1" → "hash", - "sha256" → "hash", - "filename" → "filename", - "pdb" → "other", - "filename|md5" → "other", - "filename|sha1" → "other", - "filename|sha256" → "other", - "ip-src" → "ip", - "ip-dst" → "ip", - "hostname" → "fqdn", - "domain" → "domain", - "domain|ip" → "other", - "email-src" → "mail", - "email-dst" → "mail", - "email-subject" → "mail_subject", - "email-attachment" → "other", - "float" → "other", - "url" → "url", - "http-method" → "other", - "user-agent" → "user-agent", - "regkey" → "registry", - "regkey|value" → "registry", - "AS" → "other", - "snort" → "other", - "pattern-in-file" → "other", - "pattern-in-traffic" → "other", - "pattern-in-memory" → "other", - "yara" → "other", - "sigma" → "other", - "vulnerability" → "other", - "attachment" → "file", - "malware-sample" → "file", - "link" → "other", - "comment" → "other", - "text" → "other", - "hex" → "other", - "other" → "other", - "named" → "other", - "mutex" → "other", - "target-user" → "other", - "target-email" → "mail", - "target-machine" → "fqdn", - "target-org" → "other", - "target-location" → "other", - "target-external" → "other", - "btc" → "other", - "iban" → 
"other", - "bic" → "other", - "bank-account-nr" → "other", - "aba-rtn" → "other", - "bin" → "other", - "cc-number" → "other", - "prtn" → "other", - "threat-actor" → "other", - "campaign-name" → "other", - "campaign-id" → "other", - "malware-type" → "other", - "uri" → "uri_path", - "authentihash" → "other", - "ssdeep" → "hash", - "imphash" → "hash", - "pehash" → "hash", - "impfuzzy" → "hash", - "sha224" → "hash", - "sha384" → "hash", - "sha512" → "hash", - "sha512/224" → "hash", - "sha512/256" → "hash", - "tlsh" → "other", - "filename|authentihash" → "other", - "filename|ssdeep" → "other", - "filename|imphash" → "other", - "filename|impfuzzy" → "other", - "filename|pehash" → "other", - "filename|sha224" → "other", - "filename|sha384" → "other", - "filename|sha512" → "other", - "filename|sha512/224" → "other", - "filename|sha512/256" → "other", - "filename|tlsh" → "other", - "windows-scheduled-task" → "other", - "windows-service-name" → "other", - "windows-service-displayname" → "other", - "whois-registrant-email" → "mail", - "whois-registrant-phone" → "other", - "whois-registrant-name" → "other", - "whois-registrar" → "other", - "whois-creation-date" → "other", - "x509-fingerprint-sha1" → "other", - "dns-soa-email" → "other", - "size-in-bytes" → "other", - "counter" → "other", - "datetime" → "other", - "cpe" → "other", - "port" → "other", - "ip-dst|port" → "other", - "ip-src|port" → "other", - "hostname|port" → "other", - "email-dst-display-name" → "other", - "email-src-display-name" → "other", - "email-header" → "other", - "email-reply-to" → "other", - "email-x-mailer" → "other", - "email-mime-boundary" → "other", - "email-thread-index" → "other", - "email-message-id" → "other", - "github-username" → "other", - "github-repository" → "other", - "github-organisation" → "other", - "jabber-id" → "other", - "twitter-id" → "other", - "first-name" → "other", - "middle-name" → "other", - "last-name" → "other", - "date-of-birth" → "other", - "place-of-birth" → "other", - "gender" → "other", - "passport-number" → "other", - "passport-country" → "other", - "passport-expiration" → "other", - "redress-number" → "other", - "nationality" → "other", - "visa-number" → "other", - "issue-date-of-the-visa" → "other", - "primary-residence" → "other", - "country-of-residence" → "other", - "special-service-request" → "other", - "frequent-flyer-number" → "other", - "travel-details" → "other", - "payment-details" → "other", - "place-port-of-original-embarkation" → "other", - "place-port-of-clearance" → "other", - "place-port-of-onward-foreign-destination" → "other", - "passenger-name-record-locator-number" → "other", - "mobile-application-id" → "other" + "md5" -> "hash", + "sha1" -> "hash", + "sha256" -> "hash", + "filename" -> "filename", + "pdb" -> "other", + "filename|md5" -> "other", + "filename|sha1" -> "other", + "filename|sha256" -> "other", + "ip-src" -> "ip", + "ip-dst" -> "ip", + "hostname" -> "fqdn", + "domain" -> "domain", + "domain|ip" -> "other", + "email-src" -> "mail", + "email-dst" -> "mail", + "email-subject" -> "mail_subject", + "email-attachment" -> "other", + "float" -> "other", + "url" -> "url", + "http-method" -> "other", + "user-agent" -> "user-agent", + "regkey" -> "registry", + "regkey|value" -> "registry", + "AS" -> "other", + "snort" -> "other", + "pattern-in-file" -> "other", + "pattern-in-traffic" -> "other", + "pattern-in-memory" -> "other", + "yara" -> "other", + "sigma" -> "other", + "vulnerability" -> "other", + "attachment" -> "file", + "malware-sample" -> "file", + "link" -> 
"other", + "comment" -> "other", + "text" -> "other", + "hex" -> "other", + "other" -> "other", + "named" -> "other", + "mutex" -> "other", + "target-user" -> "other", + "target-email" -> "mail", + "target-machine" -> "fqdn", + "target-org" -> "other", + "target-location" -> "other", + "target-external" -> "other", + "btc" -> "other", + "iban" -> "other", + "bic" -> "other", + "bank-account-nr" -> "other", + "aba-rtn" -> "other", + "bin" -> "other", + "cc-number" -> "other", + "prtn" -> "other", + "threat-actor" -> "other", + "campaign-name" -> "other", + "campaign-id" -> "other", + "malware-type" -> "other", + "uri" -> "uri_path", + "authentihash" -> "other", + "ssdeep" -> "hash", + "imphash" -> "hash", + "pehash" -> "hash", + "impfuzzy" -> "hash", + "sha224" -> "hash", + "sha384" -> "hash", + "sha512" -> "hash", + "sha512/224" -> "hash", + "sha512/256" -> "hash", + "tlsh" -> "other", + "filename|authentihash" -> "other", + "filename|ssdeep" -> "other", + "filename|imphash" -> "other", + "filename|impfuzzy" -> "other", + "filename|pehash" -> "other", + "filename|sha224" -> "other", + "filename|sha384" -> "other", + "filename|sha512" -> "other", + "filename|sha512/224" -> "other", + "filename|sha512/256" -> "other", + "filename|tlsh" -> "other", + "windows-scheduled-task" -> "other", + "windows-service-name" -> "other", + "windows-service-displayname" -> "other", + "whois-registrant-email" -> "mail", + "whois-registrant-phone" -> "other", + "whois-registrant-name" -> "other", + "whois-registrar" -> "other", + "whois-creation-date" -> "other", + "x509-fingerprint-sha1" -> "other", + "dns-soa-email" -> "other", + "size-in-bytes" -> "other", + "counter" -> "other", + "datetime" -> "other", + "cpe" -> "other", + "port" -> "other", + "ip-dst|port" -> "other", + "ip-src|port" -> "other", + "hostname|port" -> "other", + "email-dst-display-name" -> "other", + "email-src-display-name" -> "other", + "email-header" -> "other", + "email-reply-to" -> "other", + "email-x-mailer" -> "other", + "email-mime-boundary" -> "other", + "email-thread-index" -> "other", + "email-message-id" -> "other", + "github-username" -> "other", + "github-repository" -> "other", + "github-organisation" -> "other", + "jabber-id" -> "other", + "twitter-id" -> "other", + "first-name" -> "other", + "middle-name" -> "other", + "last-name" -> "other", + "date-of-birth" -> "other", + "place-of-birth" -> "other", + "gender" -> "other", + "passport-number" -> "other", + "passport-country" -> "other", + "passport-expiration" -> "other", + "redress-number" -> "other", + "nationality" -> "other", + "visa-number" -> "other", + "issue-date-of-the-visa" -> "other", + "primary-residence" -> "other", + "country-of-residence" -> "other", + "special-service-request" -> "other", + "frequent-flyer-number" -> "other", + "travel-details" -> "other", + "payment-details" -> "other", + "place-port-of-original-embarkation" -> "other", + "place-port-of-clearance" -> "other", + "place-port-of-onward-foreign-destination" -> "other", + "passenger-name-record-locator-number" -> "other", + "mobile-application-id" -> "other" ) } diff --git a/thehive-misp/app/connectors/misp/MispCtrl.scala b/thehive-misp/app/connectors/misp/MispCtrl.scala index 44d1eb4fbc..b15d8da334 100644 --- a/thehive-misp/app/connectors/misp/MispCtrl.scala +++ b/thehive-misp/app/connectors/misp/MispCtrl.scala @@ -74,17 +74,17 @@ class MispCtrl( private var _status = JsObject.empty private def updateStatus(): Unit = Future - .traverse(mispConfig.connections)(instance ⇒ instance.status()) + 
.traverse(mispConfig.connections)(instance => instance.status()) .onComplete { - case Success(statusDetails) ⇒ - val distinctStatus = statusDetails.map(s ⇒ (s \ "status").as[String]).toSet + case Success(statusDetails) => + val distinctStatus = statusDetails.map(s => (s \ "status").as[String]).toSet val healthStatus = if (distinctStatus.contains("OK")) { if (distinctStatus.size > 1) "WARNING" else "OK" } else "ERROR" - _status = Json.obj("enabled" → true, "servers" → statusDetails, "status" → healthStatus) + _status = Json.obj("enabled" -> true, "servers" -> statusDetails, "status" -> healthStatus) system.scheduler.scheduleOnce(checkStatusInterval)(updateStatus()) - case _: Failure[_] ⇒ - _status = Json.obj("enabled" → true, "servers" → JsObject.empty, "status" → "ERROR") + case _: Failure[_] => + _status = Json.obj("enabled" -> true, "servers" -> JsObject.empty, "status" -> "ERROR") system.scheduler.scheduleOnce(checkStatusInterval)(updateStatus()) } updateStatus() @@ -96,14 +96,14 @@ class MispCtrl( Future .traverse(mispConfig.connections)(_.healthStatus()) .onComplete { - case Success(healthStatus) ⇒ + case Success(healthStatus) => val distinctStatus = healthStatus.toSet _health = if (distinctStatus.contains(HealthStatus.Ok)) { if (distinctStatus.size > 1) HealthStatus.Warning else HealthStatus.Ok } else if (distinctStatus.contains(HealthStatus.Error)) HealthStatus.Error else HealthStatus.Warning system.scheduler.scheduleOnce(checkStatusInterval)(updateHealth()) - case _: Failure[_] ⇒ + case _: Failure[_] => _health = HealthStatus.Error system.scheduler.scheduleOnce(checkStatusInterval)(updateHealth()) } @@ -114,27 +114,27 @@ class MispCtrl( private[MispCtrl] lazy val logger = Logger(getClass) val router: Router = SimpleRouter { - case GET(p"/_syncAlerts") ⇒ syncAlerts - case GET(p"/_syncAllAlerts") ⇒ syncAllAlerts - case GET(p"/_syncArtifacts") ⇒ syncArtifacts - case POST(p"/export/$caseId/$mispName") ⇒ exportCase(mispName, caseId) - case r ⇒ throw NotFoundError(s"${r.uri} not found") + case GET(p"/_syncAlerts") => syncAlerts + case GET(p"/_syncAllAlerts") => syncAllAlerts + case GET(p"/_syncArtifacts") => syncArtifacts + case POST(p"/export/$caseId/$mispName") => exportCase(mispName, caseId) + case r => throw NotFoundError(s"${r.uri} not found") } @Timed - def syncAlerts: Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def syncAlerts: Action[AnyContent] = authenticated(Roles.admin).async { implicit request => mispSynchro .synchronize() - .map { m ⇒ + .map { m => Ok(Json.toJson(m)) } } @Timed - def syncAllAlerts: Action[AnyContent] = authenticated(Roles.admin).async { implicit request ⇒ + def syncAllAlerts: Action[AnyContent] = authenticated(Roles.admin).async { implicit request => mispSynchro .fullSynchronize() - .map { m ⇒ + .map { m => Ok(Json.toJson(m)) } } @@ -146,14 +146,14 @@ class MispCtrl( } @Timed - def exportCase(mispName: String, caseId: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒ + def exportCase(mispName: String, caseId: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request => caseSrv .get(caseId) - .flatMap { caze ⇒ + .flatMap { caze => mispExport.export(mispName, caze) } .map { - case (_, exportedAttributes) ⇒ + case (_, exportedAttributes) => renderer.toMultiOutput(CREATED, exportedAttributes) } } diff --git a/thehive-misp/app/connectors/misp/MispExport.scala b/thehive-misp/app/connectors/misp/MispExport.scala index 4e7bde9e08..6ab4e8b699 100644 --- 
a/thehive-misp/app/connectors/misp/MispExport.scala +++ b/thehive-misp/app/connectors/misp/MispExport.scala @@ -39,17 +39,17 @@ class MispExport @Inject()( alertSrv .find(and("type" ~= "misp", "case" ~= caseId, "source" ~= mispName), Some("0-1"), Nil) ._1 - .map { alert ⇒ - alert.id → alert.sourceRef() + .map { alert => + alert.id -> alert.sourceRef() } .runWith(Sink.headOption) - .map(alertIdSource ⇒ alertIdSource.map(_._1) → alertIdSource.map(_._2)) + .map(alertIdSource => alertIdSource.map(_._1) -> alertIdSource.map(_._2)) } def removeDuplicateAttributes(attributes: Seq[ExportedMispAttribute]): Seq[ExportedMispAttribute] = { var attrSet = Set.empty[(String, String, String)] val builder = Seq.newBuilder[ExportedMispAttribute] - attributes.foreach { attr ⇒ + attributes.foreach { attr => val tuple = (attr.category, attr.tpe, attr.value.fold(identity, _.name)) if (!attrSet.contains(tuple)) { builder += attr @@ -72,21 +72,21 @@ class MispExport @Inject()( logger.debug(s"Create MISP event $title, with ${attributes.size} attributes") val mispEvent = Json.obj( - "Event" → Json.obj( - "distribution" → 0, - "threat_level_id" → math.min(4, math.max(1, 4 - severity)), - "analysis" → 0, - "info" → title, - "date" → dateFormat.format(date), - "published" → false, - "Attribute" → attributes, - "Tag" → JsArray((tags.map(JsString.apply) :+ tlpWrites.writes(tlp)).map(t ⇒ Json.obj("name" → t))), - "extends_uuid" → extendsEvent.fold[JsValue](JsNull)(JsString) + "Event" -> Json.obj( + "distribution" -> 0, + "threat_level_id" -> math.min(4, math.max(1, 4 - severity)), + "analysis" -> 0, + "info" -> title, + "date" -> dateFormat.format(date), + "published" -> false, + "Attribute" -> attributes, + "Tag" -> JsArray((tags.map(JsString.apply) :+ tlpWrites.writes(tlp)).map(t => Json.obj("name" -> t))), + "extends_uuid" -> extendsEvent.fold[JsValue](JsNull)(JsString) ) ) mispConnection("events") .post(mispEvent) - .map { mispResponse ⇒ + .map { mispResponse => val eventId = (mispResponse.json \ "Event" \ "id") .asOpt[String] .getOrElse(throw InternalError(s"Unexpected MISP response: ${mispResponse.status} ${mispResponse.statusText}\n${mispResponse.body}")) @@ -96,59 +96,59 @@ class MispExport @Inject()( .getOrElse(JsObject.empty) .fields .toMap - .mapValues { m ⇒ + .mapValues { m => (m \ "value") .asOpt[Seq[String]] .flatMap(_.headOption) .getOrElse(s"Unexpected message format: $m") } val exportedAttributes = attributes.zipWithIndex.collect { // keep only attributes that succeed - case (attr, index) if !messages.contains(index.toString) ⇒ attr + case (attr, index) if !messages.contains(index.toString) => attr } - eventId → exportedAttributes + eventId -> exportedAttributes } } def exportAttribute(mispConnection: MispConnection, eventId: String, attribute: ExportedMispAttribute): Future[Artifact] = { val mispResponse = attribute match { - case ExportedMispAttribute(artifact, _, _, _, Right(attachment), comment) ⇒ + case ExportedMispAttribute(artifact, _, _, _, Right(attachment), comment) => attachmentSrv .source(attachment.id) .runReduce(_ ++ _) - .flatMap { data ⇒ + .flatMap { data => val b64data = java.util.Base64.getEncoder.encodeToString(data.toArray[Byte]) val body = Json.obj( - "request" → Json.obj( - "category" → "Payload delivery", - "type" → "malware-sample", - "comment" → comment, - "files" → Json.arr(Json.obj("filename" → attachment.name, "data" → b64data)), - "to_ids" → artifact.ioc() + "request" -> Json.obj( + "category" -> "Payload delivery", + "type" -> "malware-sample", + "comment" -> comment, + 
"files" -> Json.arr(Json.obj("filename" -> attachment.name, "data" -> b64data)), + "to_ids" -> artifact.ioc() ) ) mispConnection(s"events/upload_sample/$eventId").post(body) } - case attr ⇒ mispConnection(s"attributes/add/$eventId").post(Json.toJson(attr)) + case attr => mispConnection(s"attributes/add/$eventId").post(Json.toJson(attr)) } mispResponse.map { - case response if response.status / 100 == 2 ⇒ + case response if response.status / 100 == 2 => // then add tlp tag // doesn't work with file artifact (malware sample attribute) (response.json \ "Attribute" \ "id") .asOpt[String] - .foreach { attributeId ⇒ + .foreach { attributeId => mispConnection("/attributes/addTag") - .post(Json.obj("attribute" → attributeId, "tag" → tlpWrites.writes(attribute.tlp))) + .post(Json.obj("attribute" -> attributeId, "tag" -> tlpWrites.writes(attribute.tlp))) } attribute.artifact - case response ⇒ + case response => val json = response.json val message = (json \ "message").asOpt[String] val error = (json \ "errors" \ "value").head.asOpt[String] val errorMessage = for { - m ← message - e ← error + m <- message + e <- error } yield s"$m $e" throw MispExportError( errorMessage orElse message orElse error getOrElse s"Unexpected MISP response: ${response.status} ${response.statusText}\n${response.body}", @@ -161,12 +161,12 @@ class MispExport @Inject()( mispConnection(s"/events/getEditStrategy/$eventId") .get() .map { - case resp if resp.status / 100 == 2 ⇒ + case resp if resp.status / 100 == 2 => val body = resp.json val isEditStrategy = (body \ "strategy").asOpt[String].contains("edit") if (!isEditStrategy) (body \ "extensions" \ 0 \ "id").asOpt[String] else Some(eventId) - case _ ⇒ Some(eventId) + case _ => Some(eventId) } def export(mispName: String, caze: Case)(implicit authContext: AuthContext): Future[(String, Seq[Try[Artifact]])] = { @@ -176,12 +176,12 @@ class MispExport @Inject()( Future.failed(BadRequestError(s"Export on MISP connection $mispName is denied by configuration")) else { for { - (maybeAlertId, maybeEventId) ← relatedMispEvent(mispName, caze.id) - _ = logger.debug(maybeEventId.fold(s"Related MISP event doesn't exist")(e ⇒ s"Related MISP event found : $e")) - attributes ← mispSrv.getAttributesFromCase(caze) + (maybeAlertId, maybeEventId) <- relatedMispEvent(mispName, caze.id) + _ = logger.debug(maybeEventId.fold(s"Related MISP event doesn't exist")(e => s"Related MISP event found : $e")) + attributes <- mispSrv.getAttributesFromCase(caze) uniqueAttributes = removeDuplicateAttributes(attributes) - eventToUpdate ← maybeEventId.fold(Future.successful[Option[String]](None))(getUpdatableEvent(mispConnection, _)) - (eventId, initialExportesArtifacts, existingAttributes) ← eventToUpdate.fold { + eventToUpdate <- maybeEventId.fold(Future.successful[Option[String]](None))(getUpdatableEvent(mispConnection, _)) + (eventId, initialExportesArtifacts, existingAttributes) <- eventToUpdate.fold { logger.debug(s"Creating a new MISP event that extends $maybeEventId") val simpleAttributes = uniqueAttributes.filter(_.value.isLeft) // if no event is associated to this case, create a new one @@ -195,59 +195,59 @@ class MispExport @Inject()( maybeEventId, if (mispConnection.exportCaseTags) caze.tags() else Nil ).map { - case (eventId, exportedAttributes) ⇒ - (eventId, exportedAttributes.map(a ⇒ Success(a.artifact)), exportedAttributes.map(_.value.map(_.name))) + case (eventId, exportedAttributes) => + (eventId, exportedAttributes.map(a => Success(a.artifact)), exportedAttributes.map(_.value.map(_.name))) } - } 
{ eventId ⇒ // if an event already exists, retrieve its attributes in order to export only new one + } { eventId => // if an event already exists, retrieve its attributes in order to export only new one logger.debug(s"Updating MISP event $eventId") - mispSrv.getAttributesFromMisp(mispConnection, eventId, None).map { attributes ⇒ + mispSrv.getAttributesFromMisp(mispConnection, eventId, None).map { attributes => (eventId, Nil, attributes.map { - case MispArtifact(SimpleArtifactData(data), _, _, _, _, _, _) ⇒ Left(data) - case MispArtifact(RemoteAttachmentArtifact(filename, _, _), _, _, _, _, _, _) ⇒ Right(filename) - case MispArtifact(AttachmentArtifact(Attachment(filename, _, _, _, _)), _, _, _, _, _, _) ⇒ Right(filename) + case MispArtifact(SimpleArtifactData(data), _, _, _, _, _, _) => Left(data) + case MispArtifact(RemoteAttachmentArtifact(filename, _, _), _, _, _, _, _, _) => Right(filename) + case MispArtifact(AttachmentArtifact(Attachment(filename, _, _, _, _)), _, _, _, _, _, _) => Right(filename) }) } } - newAttributes = uniqueAttributes.filterNot(attr ⇒ existingAttributes.contains(attr.value.map(_.name))) - exportedArtifact ← Future.traverse(newAttributes)(attr ⇒ exportAttribute(mispConnection, eventId, attr).toTry) - artifacts = uniqueAttributes.map { a ⇒ + newAttributes = uniqueAttributes.filterNot(attr => existingAttributes.contains(attr.value.map(_.name))) + exportedArtifact <- Future.traverse(newAttributes)(attr => exportAttribute(mispConnection, eventId, attr).toTry) + artifacts = uniqueAttributes.map { a => Json.obj( - "data" → a.artifact.data(), - "dataType" → a.artifact.dataType(), - "message" → a.artifact.message(), - "startDate" → a.artifact.startDate(), - "attachment" → a.artifact.attachment(), - "tlp" → a.artifact.tlp(), - "tags" → a.artifact.tags(), - "ioc" → a.artifact.ioc() + "data" -> a.artifact.data(), + "dataType" -> a.artifact.dataType(), + "message" -> a.artifact.message(), + "startDate" -> a.artifact.startDate(), + "attachment" -> a.artifact.attachment(), + "tlp" -> a.artifact.tlp(), + "tags" -> a.artifact.tags(), + "ioc" -> a.artifact.ioc() ) } - alert ← maybeAlertId.fold { + alert <- maybeAlertId.fold { alertSrv.create( Fields( Json.obj( - "type" → "misp", - "source" → mispName, - "sourceRef" → eventId, - "date" → caze.startDate(), - "lastSyncDate" → new Date(0), - "case" → caze.id, - "title" → caze.title(), - "description" → "Case have been exported to MISP", - "severity" → caze.severity(), - "tags" → caze.tags(), - "tlp" → caze.tlp(), - "artifacts" → artifacts, - "status" → "Imported", - "follow" → true + "type" -> "misp", + "source" -> mispName, + "sourceRef" -> eventId, + "date" -> caze.startDate(), + "lastSyncDate" -> new Date(0), + "case" -> caze.id, + "title" -> caze.title(), + "description" -> "Case have been exported to MISP", + "severity" -> caze.severity(), + "tags" -> caze.tags(), + "tlp" -> caze.tlp(), + "artifacts" -> artifacts, + "status" -> "Imported", + "follow" -> true ) ) ) - } { alertId ⇒ - alertSrv.update(alertId, Fields(Json.obj("artifacts" → artifacts, "status" → "Imported"))) + } { alertId => + alertSrv.update(alertId, Fields(Json.obj("artifacts" -> artifacts, "status" -> "Imported"))) } - } yield alert.id → (initialExportesArtifacts ++ exportedArtifact) + } yield alert.id -> (initialExportesArtifacts ++ exportedArtifact) } } } diff --git a/thehive-misp/app/connectors/misp/MispSrv.scala b/thehive-misp/app/connectors/misp/MispSrv.scala index 648641cc81..00b6842e5b 100644 --- a/thehive-misp/app/connectors/misp/MispSrv.scala +++ 
b/thehive-misp/app/connectors/misp/MispSrv.scala @@ -41,7 +41,7 @@ class MispSrv @Inject()( mispConfig .connections .find(_.name == name) - .fold(Future.failed[MispConnection](NotFoundError(s"""Configuration of MISP server "$name" not found"""))) { instanceConfig ⇒ + .fold(Future.failed[MispConnection](NotFoundError(s"""Configuration of MISP server "$name" not found"""))) { instanceConfig => Future.successful(instanceConfig) } @@ -50,7 +50,7 @@ class MispSrv @Inject()( require(!eventId.isEmpty) mispConnection(s"events/$eventId") .get() - .map { e ⇒ + .map { e => (e.json \ "Event") .as[MispAlert] .copy(source = mispConnection.name) @@ -63,9 +63,9 @@ class MispSrv @Inject()( Source .future { mispConnection("events/index") - .post(Json.obj("searchpublish_timestamp" → date)) + .post(Json.obj("searchpublish_timestamp" -> date)) } - .mapConcat { response ⇒ + .mapConcat { response => val eventJson = Try { response .body[JsValue] @@ -75,7 +75,7 @@ class MispSrv @Inject()( Nil } val events = eventJson - .flatMap { j ⇒ + .flatMap { j => j.asOpt[MispAlert] .orElse { logger.warn(s"MISP event can't be parsed\n$j") @@ -98,12 +98,12 @@ class MispSrv @Inject()( artifactSrv .find(and(withParent(caze), "status" ~= "Ok", "ioc" ~= true), Some("all"), Nil) ._1 - .map { artifact ⇒ + .map { artifact => val (category, tpe) = fromArtifact(artifact.dataType(), artifact.data()) val value = (artifact.data(), artifact.attachment()) match { - case (Some(data), None) ⇒ Left(data) - case (None, Some(attachment)) ⇒ Right(attachment) - case _ ⇒ + case (Some(data), None) => Left(data) + case (None, Some(attachment)) => Right(attachment) + case _ => logger.error(s"Artifact $artifact has neither data nor attachment") sys.error("???") } @@ -119,17 +119,17 @@ class MispSrv @Inject()( val date = fromDate.fold(0L)(_.getTime / 1000) mispConnection(s"attributes/restSearch/json") - .post(Json.obj("request" → Json.obj("timestamp" → date, "eventid" → eventId))) + .post(Json.obj("request" -> Json.obj("timestamp" -> date, "eventid" -> eventId))) // add ("deleted" → 1) to see also deleted attributes // add ("deleted" → "only") to see only deleted attributes .map(_.body) .map { - case body if mispConnection.maxSize.fold(false)(body.length > _) ⇒ + case body if mispConnection.maxSize.fold(false)(body.length > _) => logger.debug(s"Size of event exceeds (${body.length}) the configured limit") JsObject.empty - case body ⇒ Json.parse(body) + case body => Json.parse(body) } - .map { jsBody ⇒ + .map { jsBody => val refDate = fromDate.getOrElse(new Date(0)) val artifactTags = s"src:${mispConnection.name}" +: mispConnection.artifactTags (jsBody \ "response" \\ "Attribute") @@ -137,12 +137,12 @@ class MispSrv @Inject()( .filter(_.date after refDate) .flatMap(convertAttribute) .groupBy { - case MispArtifact(SimpleArtifactData(data), dataType, _, _, _, _, _) ⇒ dataType → Right(data) - case MispArtifact(RemoteAttachmentArtifact(filename, _, _), dataType, _, _, _, _, _) ⇒ dataType → Left(filename) - case MispArtifact(AttachmentArtifact(Attachment(filename, _, _, _, _)), dataType, _, _, _, _, _) ⇒ dataType → Left(filename) + case MispArtifact(SimpleArtifactData(data), dataType, _, _, _, _, _) => dataType -> Right(data) + case MispArtifact(RemoteAttachmentArtifact(filename, _, _), dataType, _, _, _, _, _) => dataType -> Left(filename) + case MispArtifact(AttachmentArtifact(Attachment(filename, _, _, _, _)), dataType, _, _, _, _, _) => dataType -> Left(filename) } .values - .map { mispArtifact ⇒ + .map { mispArtifact => mispArtifact.head.copy(tags = 
(mispArtifact.head.tags ++ artifactTags).distinct, tlp = 2L) } .toSeq @@ -154,29 +154,29 @@ class MispSrv @Inject()( ec: ExecutionContext ): Option[Future[Fields]] = (for { - dataType ← (attr \ "dataType").validate[String] - data ← (attr \ "data").validateOpt[String] - message ← (attr \ "message").validate[String] - startDate ← (attr \ "startDate").validate[Date] - attachmentReference ← (attr \ "remoteAttachment" \ "reference").validateOpt[String] - attachmentType ← (attr \ "remoteAttachment" \ "type").validateOpt[String] + dataType <- (attr \ "dataType").validate[String] + data <- (attr \ "data").validateOpt[String] + message <- (attr \ "message").validate[String] + startDate <- (attr \ "startDate").validate[Date] + attachmentReference <- (attr \ "remoteAttachment" \ "reference").validateOpt[String] + attachmentType <- (attr \ "remoteAttachment" \ "type").validateOpt[String] attachment = attachmentReference .flatMap { - case ref if dataType == "file" ⇒ Some(downloadAttachment(mispConnection, ref)) - case _ ⇒ None + case ref if dataType == "file" => Some(downloadAttachment(mispConnection, ref)) + case _ => None } .map { - case f if attachmentType.contains("malware-sample") ⇒ f.map(extractMalwareAttachment) - case f ⇒ f + case f if attachmentType.contains("malware-sample") => f.map(extractMalwareAttachment) + case f => f } tags = (attr \ "tags").asOpt[Seq[String]].getOrElse(Nil) tlp = tags .map(_.toLowerCase) .collectFirst { - case "tlp:white" ⇒ JsNumber(0) - case "tlp:green" ⇒ JsNumber(1) - case "tlp:amber" ⇒ JsNumber(2) - case "tlp:red" ⇒ JsNumber(3) + case "tlp:white" => JsNumber(0) + case "tlp:green" => JsNumber(1) + case "tlp:amber" => JsNumber(2) + case "tlp:red" => JsNumber(3) } .getOrElse(JsNumber(defaultTlp)) fields = Fields @@ -194,38 +194,38 @@ class MispSrv @Inject()( ) .set("tlp", tlp) if (attachment.isDefined && data.isEmpty) || (dataType != "file" && data.isDefined) - } yield attachment.fold(Future.successful(fields.set("data", data.get)))(_.map { fiv ⇒ + } yield attachment.fold(Future.successful(fields.set("data", data.get)))(_.map { fiv => fields.set("attachment", fiv) })) match { - case JsSuccess(r, _) ⇒ Some(r) - case e: JsError ⇒ + case JsSuccess(r, _) => Some(r) + case e: JsError => logger.warn(s"Invalid attribute format: $e\n$attr") None } def createCase(alert: Alert, customCaseTemplate: Option[String])(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = alert.caze() match { - case Some(id) ⇒ caseSrv.get(id) - case None ⇒ + case Some(id) => caseSrv.get(id) + case None => for { - caseTemplate ← alertSrv.getCaseTemplate(customCaseTemplate) - caze ← caseSrv.create(Fields(alert.toCaseJson), caseTemplate) - _ ← importArtifacts(alert, caze) + caseTemplate <- alertSrv.getCaseTemplate(customCaseTemplate) + caze <- caseSrv.create(Fields(alert.toCaseJson), caseTemplate) + _ <- importArtifacts(alert, caze) } yield caze } def importArtifacts(alert: Alert, caze: Case)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Case] = for { - instanceConfig ← getInstanceConfig(alert.source()) - artifacts ← Future.sequence(alert.artifacts().flatMap(attributeToArtifact(instanceConfig, _, alert.tlp()))) - _ ← artifactSrv.create(caze, artifacts) + instanceConfig <- getInstanceConfig(alert.source()) + artifacts <- Future.sequence(alert.artifacts().flatMap(attributeToArtifact(instanceConfig, _, alert.tlp()))) + _ <- artifactSrv.create(caze, artifacts) } yield caze def mergeWithCase(alert: Alert, caze: Case)(implicit authContext: AuthContext, ec: 
ExecutionContext): Future[Case] = for { - _ ← importArtifacts(alert, caze) + _ <- importArtifacts(alert, caze) description = caze.description() + s"\n \n#### Merged with MISP event ${alert.title()}\n\n${alert.description().trim}" - updatedCase ← caseSrv.update(caze, Fields.empty.set("description", description)) + updatedCase <- caseSrv.update(caze, Fields.empty.set("description", description)) } yield updatedCase def updateMispAlertArtifact()(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] = { @@ -233,38 +233,38 @@ class MispSrv @Inject()( logger.info("Update MISP attributes in alerts") val (alerts, _) = alertSrv.find("type" ~= "misp", Some("all"), Nil) alerts - .mapAsyncUnordered(5) { alert ⇒ + .mapAsyncUnordered(5) { alert => if (alert.artifacts().nonEmpty) { logger.info(s"alert ${alert.id} has artifacts, ignore it") - Future.successful(alert → Nil) + Future.successful(alert -> Nil) } else { getInstanceConfig(alert.source()) - .flatMap { mcfg ⇒ + .flatMap { mcfg => getAttributesFromMisp(mcfg, alert.sourceRef(), None) } - .map(alert → _) + .map(alert -> _) .recover { - case NotFoundError(m) ⇒ + case NotFoundError(m) => logger.error(s"Retrieve MISP attribute of event ${alert.id} error: $m") - alert → Nil - case error ⇒ + alert -> Nil + case error => logger.error(s"Retrieve MISP attribute of event ${alert.id} error", error) - alert → Nil + alert -> Nil } } } .filterNot(_._2.isEmpty) .mapAsyncUnordered(5) { - case (alert, artifacts) ⇒ + case (alert, artifacts) => logger.info(s"Updating alert ${alert.id}") alertSrv .update(alert.id, Fields.empty.set("artifacts", Json.toJson(artifacts))) .recover { - case t ⇒ logger.error(s"Update alert ${alert.id} fail", t) + case t => logger.error(s"Update alert ${alert.id} fail", t) } } .runWith(Sink.ignore) - .map(_ ⇒ ()) + .map(_ => ()) } def extractMalwareAttachment(file: FileInputValue)(implicit authContext: AuthContext): FileInputValue = { @@ -277,11 +277,11 @@ class MispSrv @Inject()( // Get the list of file headers from the zip file val fileHeaders = zipFile.getFileHeaders.asScala.toList - val (fileNameHeaders, contentFileHeaders) = fileHeaders.partition { fileHeader ⇒ + val (fileNameHeaders, contentFileHeaders) = fileHeaders.partition { fileHeader => fileHeader.getFileName.endsWith(".filename.txt") } (for { - fileNameHeader ← fileNameHeaders + fileNameHeader <- fileNameHeaders .headOption .orElse { logger.warn(s"Format of malware attribute ${file.name} is invalid : file containing filename not found") @@ -291,7 +291,7 @@ class MispSrv @Inject()( len = zipFile.getInputStream(fileNameHeader).read(buffer) filename = new String(buffer, 0, len) - contentFileHeader ← contentFileHeaders + contentFileHeader <- contentFileHeaders .headOption .orElse { logger.warn(s"Format of malware attribute ${file.name} is invalid : content file not found") @@ -303,7 +303,7 @@ class MispSrv @Inject()( _ = zipFile.extractFile(contentFileHeader, tempFile.getParent.toString, tempFile.getFileName.toString) } yield FileInputValue(filename, tempFile, "application/octet-stream")).getOrElse(file) } catch { - case e: ZipException ⇒ + case e: ZipException => logger.warn(s"Format of malware attribute ${file.name} is invalid : zip file is unreadable", e) file } @@ -319,21 +319,21 @@ class MispSrv @Inject()( .withMethod("GET") .stream() .flatMap { - case response if response.status != 200 ⇒ + case response if response.status != 200 => val status = response.status logger.warn(s"MISP attachment $attachmentId can't be downloaded (status $status) : ${response.body}") 
Future.failed(InternalError(s"MISP attachment $attachmentId can't be downloaded (status $status)")) - case response ⇒ + case response => val tempFile = tempSrv.newTemporaryFile("misp_attachment", attachmentId) response .bodyAsSource .runWith(FileIO.toPath(tempFile)) - .map { _ ⇒ + .map { _ => val contentType = response.headers.getOrElse("Content-Type", Seq("application/octet-stream")).head val filename = response .headers .get("Content-Disposition") - .flatMap(_.collectFirst { case fileNameExtractor(name) ⇒ name }) + .flatMap(_.collectFirst { case fileNameExtractor(name) => name }) .getOrElse("noname") FileInputValue(filename, tempFile, contentType) } diff --git a/thehive-misp/app/connectors/misp/MispSynchro.scala b/thehive-misp/app/connectors/misp/MispSynchro.scala index f7150a6af2..508a09e4cc 100644 --- a/thehive-misp/app/connectors/misp/MispSynchro.scala +++ b/thehive-misp/app/connectors/misp/MispSynchro.scala @@ -49,26 +49,26 @@ class MispSynchro @Inject()( } private[misp] def initScheduler(): Unit = { - val task = system.scheduler.scheduleWithFixedDelay(0.seconds, mispConfig.interval) {() => + val task = system.scheduler.scheduleWithFixedDelay(0.seconds, mispConfig.interval) { () => if (migrationSrv.isReady) { logger.info("Update of MISP events is starting ...") userSrv - .inInitAuthContext { implicit authContext ⇒ - synchronize().andThen { case _ ⇒ tempSrv.releaseTemporaryFiles() } + .inInitAuthContext { implicit authContext => + synchronize().andThen { case _ => tempSrv.releaseTemporaryFiles() } } .onComplete { - case Success(a) ⇒ + case Success(a) => logger.info("Misp synchronization completed") a.collect { - case Failure(t) ⇒ logger.warn(s"Update MISP error", t) + case Failure(t) => logger.warn(s"Update MISP error", t) } - case Failure(t) ⇒ logger.info("Misp synchronization failed", t) + case Failure(t) => logger.info("Misp synchronization failed", t) } } else { logger.info("MISP synchronization cancel, database is not ready") } } - lifecycle.addStopHook { () ⇒ + lifecycle.addStopHook { () => logger.info("Stopping MISP fetching ...") task.cancel() Future.successful(()) @@ -83,26 +83,26 @@ class MispSynchro @Inject()( // for each MISP server Source(mispConfig.connections.filter(_.canImport).toList) // get last synchronization - .mapAsyncUnordered(1) { mispConnection ⇒ + .mapAsyncUnordered(1) { mispConnection => alertSrv .stats(and("type" ~= "misp", "source" ~= mispConnection.name), Seq(selectMax("lastSyncDate"))) - .map { maxLastSyncDate ⇒ - mispConnection → new Date((maxLastSyncDate \ "max_lastSyncDate").as[Long]) + .map { maxLastSyncDate => + mispConnection -> new Date((maxLastSyncDate \ "max_lastSyncDate").as[Long]) } - .recover { case _ ⇒ mispConnection → new Date(0) } + .recover { case _ => mispConnection -> new Date(0) } } .flatMapConcat { - case (mispConnection, lastSyncDate) ⇒ + case (mispConnection, lastSyncDate) => synchronize(mispConnection, Some(lastSyncDate)) } - .withAttributes(ActorAttributes.supervisionStrategy(_ ⇒ Supervision.Resume)) + .withAttributes(ActorAttributes.supervisionStrategy(_ => Supervision.Resume)) .runWith(Sink.seq) } def fullSynchronize()(implicit authContext: AuthContext): Future[immutable.Seq[Try[Alert]]] = Source(mispConfig.connections.filter(_.canImport).toList) - .flatMapConcat(mispConnection ⇒ synchronize(mispConnection, None)) - .withAttributes(ActorAttributes.supervisionStrategy(_ ⇒ Supervision.Resume)) + .flatMapConcat(mispConnection => synchronize(mispConnection, None)) + .withAttributes(ActorAttributes.supervisionStrategy(_ => 
Supervision.Resume)) .runWith(Sink.seq) def updateArtifacts(mispConnection: MispConnection, caseId: String, mispArtifacts: Seq[MispArtifact])( @@ -112,40 +112,40 @@ class MispSynchro @Inject()( for { // Either data or filename - existingArtifacts: Seq[Either[String, String]] ← artifactSrv + existingArtifacts: Seq[Either[String, String]] <- artifactSrv .find(and(withParent("case", caseId), "status" ~= "Ok"), Some("all"), Nil) ._1 - .map { artifact ⇒ + .map { artifact => artifact.data().map(Left.apply).getOrElse(Right(artifact.attachment().get.name)) } - .withAttributes(ActorAttributes.supervisionStrategy(_ ⇒ Supervision.Resume)) + .withAttributes(ActorAttributes.supervisionStrategy(_ => Supervision.Resume)) .runWith(Sink.seq) - newAttributes ← Future.traverse(mispArtifacts) { - case artifact @ MispArtifact(SimpleArtifactData(data), _, _, _, _, _, _) if !existingArtifacts.contains(Right(data)) ⇒ + newAttributes <- Future.traverse(mispArtifacts) { + case artifact @ MispArtifact(SimpleArtifactData(data), _, _, _, _, _, _) if !existingArtifacts.contains(Right(data)) => Future.successful(Fields(Json.toJson(artifact).as[JsObject])) case artifact @ MispArtifact(AttachmentArtifact(Attachment(filename, _, _, _, _)), _, _, _, _, _, _) - if !existingArtifacts.contains(Left(filename)) ⇒ + if !existingArtifacts.contains(Left(filename)) => Future.successful(Fields(Json.toJson(artifact).as[JsObject])) case artifact @ MispArtifact(RemoteAttachmentArtifact(filename, reference, tpe), _, _, _, _, _, _) - if !existingArtifacts.contains(Left(filename)) ⇒ + if !existingArtifacts.contains(Left(filename)) => mispSrv .downloadAttachment(mispConnection, reference) .map { - case fiv if tpe == "malware-sample" ⇒ mispSrv.extractMalwareAttachment(fiv) - case fiv ⇒ fiv + case fiv if tpe == "malware-sample" => mispSrv.extractMalwareAttachment(fiv) + case fiv => fiv } - .map(fiv ⇒ Fields(Json.toJson(artifact).as[JsObject]).unset("remoteAttachment").set("attachment", fiv)) - case _ ⇒ Future.successful(Fields.empty) + .map(fiv => Fields(Json.toJson(artifact).as[JsObject]).unset("remoteAttachment").set("attachment", fiv)) + case _ => Future.successful(Fields.empty) } - createdArtifacts ← artifactSrv.create(caseId, newAttributes.filterNot(_.isEmpty)) + createdArtifacts <- artifactSrv.create(caseId, newAttributes.filterNot(_.isEmpty)) } yield createdArtifacts } def getOriginalEvent(mispConnection: MispConnection, event: MispAlert): Future[MispAlert] = event.extendsUuid match { - case None ⇒ Future.successful(event) - case Some(e) if e.isEmpty ⇒ Future.successful(event) - case Some(originalEvent) ⇒ + case None => Future.successful(event) + case Some(e) if e.isEmpty => Future.successful(event) + case Some(originalEvent) => mispSrv .getEvent(mispConnection, originalEvent) .flatMap(getOriginalEvent(mispConnection, _)) @@ -158,61 +158,61 @@ class MispSynchro @Inject()( mispSrv .getEventsFromDate(mispConnection, syncFrom) // get related alert - .mapAsyncUnordered(1) { event ⇒ + .mapAsyncUnordered(1) { event => logger.trace(s"Looking for alert misp:${event.source}:${event.sourceRef}") getOriginalEvent(mispConnection, event) - .flatMap { originalEvent ⇒ + .flatMap { originalEvent => logger.trace(s"Event misp:${event.source}:${event.sourceRef} is originated from misp:${originalEvent.source}:${originalEvent.sourceRef}") alertSrv.get("misp", mispConnection.name, originalEvent.sourceRef) } .map((event, _)) } .mapAsyncUnordered(1) { - case (event, alert) ⇒ + case (event, alert) => logger.trace(s"MISP synchro ${mispConnection.name}, event 
${event.sourceRef}, alert ${alert - .fold("no alert")(a ⇒ "alert " + a.alertId() + "last sync at " + a.lastSyncDate())}") + .fold("no alert")(a => "alert " + a.alertId() + "last sync at " + a.lastSyncDate())}") logger.debug(s"getting MISP event ${event.source}:${event.sourceRef}") mispSrv - .getAttributesFromMisp(mispConnection, event.sourceRef, lastSyncDate.flatMap(_ ⇒ alert.map(_.lastSyncDate()))) + .getAttributesFromMisp(mispConnection, event.sourceRef, lastSyncDate.flatMap(_ => alert.map(_.lastSyncDate()))) .map((event, alert, _)) } .filter { // attrs is empty if the size of the http response exceed the configured limit (max-size) - case (_, _, attrs) if attrs.isEmpty ⇒ false - case (event, _, attrs) if mispConnection.maxAttributes.fold(false)(attrs.lengthCompare(_) > 0) ⇒ + case (_, _, attrs) if attrs.isEmpty => false + case (event, _, attrs) if mispConnection.maxAttributes.fold(false)(attrs.lengthCompare(_) > 0) => logger.debug(s"Event ${event.sourceRef} ignore because it has too many attributes (${attrs.length}>${mispConnection.maxAttributes.get})") false - case _ ⇒ true + case _ => true } .mapAsyncUnordered(1) { // if there is no related alert, create a new one - case (event, None, attrs) ⇒ + case (event, None, attrs) => logger.debug(s"MISP event ${event.source}:${event.sourceRef} has no related alert, create it with ${attrs.size} observable(s)") val alertJson = Json.toJson(event).as[JsObject] + - ("type" → JsString("misp")) + - ("caseTemplate" → mispConnection.caseTemplate.fold[JsValue](JsNull)(JsString)) + - ("artifacts" → Json.toJson(attrs)) + ("type" -> JsString("misp")) + + ("caseTemplate" -> mispConnection.caseTemplate.fold[JsValue](JsNull)(JsString)) + + ("artifacts" -> Json.toJson(attrs)) alertSrv .create(Fields(alertJson)) .map(Success(_)) - .recover { case t ⇒ Failure(t) } + .recover { case t => Failure(t) } - case (event, Some(alert), attrs) ⇒ + case (event, Some(alert), attrs) => logger.debug(s"MISP event ${event.source}:${event.sourceRef} has related alert, update it with ${attrs.size} observable(s)") alert .caze() .fold[Future[Boolean]](Future.successful(lastSyncDate.isDefined && attrs.nonEmpty && alert.follow())) { - case caze if alert.follow() ⇒ + case caze if alert.follow() => for { - addedArtifacts ← updateArtifacts(mispConnection, caze, attrs) + addedArtifacts <- updateArtifacts(mispConnection, caze, attrs) updateStatus = lastSyncDate.nonEmpty && addedArtifacts.exists(_.isSuccess) - _ ← if (updateStatus) caseSrv.update(caze, Fields.empty.set("status", CaseStatus.Open.toString)) else Future.successful(()) + _ <- if (updateStatus) caseSrv.update(caze, Fields.empty.set("status", CaseStatus.Open.toString)) else Future.successful(()) } yield updateStatus - case _ ⇒ Future.successful(false) + case _ => Future.successful(false) } - .flatMap { updateStatus ⇒ - val artifacts = Collection.distinctBy(alert.artifacts() ++ attrs.map(Json.toJson(_))) { a ⇒ + .flatMap { updateStatus => + val artifacts = Collection.distinctBy(alert.artifacts() ++ attrs.map(Json.toJson(_))) { a => (a \ "data").getOrElse(JsNull).toString + (a \ "dataType").getOrElse(JsNull).toString + (a \ "attachment").getOrElse(JsNull).toString + @@ -224,18 +224,18 @@ class MispSynchro @Inject()( "sourceRef" - "caseTemplate" - "date" + - ("artifacts" → JsArray(artifacts)) + - ("status" → (if (!updateStatus) Json.toJson(alert.status()) + ("artifacts" -> JsArray(artifacts)) + + ("status" -> (if (!updateStatus) Json.toJson(alert.status()) else alert.status() match { - case AlertStatus.New ⇒ 
Json.toJson(AlertStatus.New) - case _ ⇒ Json.toJson(AlertStatus.Updated) + case AlertStatus.New => Json.toJson(AlertStatus.New) + case _ => Json.toJson(AlertStatus.Updated) })) logger.debug(s"Update alert ${alert.id} with\n$alertJson") alertSrv.update(alert.id, Fields(alertJson)) } .map(Success(_)) - .recover { case t ⇒ Failure(t) } + .recover { case t => Failure(t) } } } } diff --git a/thehive-misp/app/connectors/misp/UpdateMispAlertArtifactActor.scala b/thehive-misp/app/connectors/misp/UpdateMispAlertArtifactActor.scala index adef8c66cf..89864b6a1a 100644 --- a/thehive-misp/app/connectors/misp/UpdateMispAlertArtifactActor.scala +++ b/thehive-misp/app/connectors/misp/UpdateMispAlertArtifactActor.scala @@ -38,18 +38,18 @@ class UpdateMispAlertArtifactActor @Inject()(eventSrv: EventSrv, userSrv: UserSr } override def receive: Receive = { - case UpdateMispAlertArtifact() ⇒ + case UpdateMispAlertArtifact() => logger.info("UpdateMispAlertArtifact") userSrv - .inInitAuthContext { implicit authContext ⇒ + .inInitAuthContext { implicit authContext => mispSrv.updateMispAlertArtifact() } .onComplete { - case Success(_) ⇒ logger.info("Artifacts in MISP alerts updated") - case Failure(error) ⇒ logger.error("Update MISP alert artifacts error :", error) + case Success(_) => logger.info("Artifacts in MISP alerts updated") + case Failure(error) => logger.error("Update MISP alert artifacts error :", error) } () - case msg ⇒ + case msg => logger.info(s"Receiving unexpected message: $msg (${msg.getClass})") } } diff --git a/ui/app/index.html b/ui/app/index.html index 276ad53014..31368bbd98 100644 --- a/ui/app/index.html +++ b/ui/app/index.html @@ -168,6 +168,7 @@ + diff --git a/ui/app/scripts/controllers/alert/AlertListCtrl.js b/ui/app/scripts/controllers/alert/AlertListCtrl.js index 83740c9e86..6ad4b3abc2 100755 --- a/ui/app/scripts/controllers/alert/AlertListCtrl.js +++ b/ui/app/scripts/controllers/alert/AlertListCtrl.js @@ -241,29 +241,30 @@ } }; - this.getResponders = function(eventId, force) { + this.getResponders = function(event, force) { if(!force && this.responders !== null) { return; } this.responders = null; - CortexSrv.getResponders('alert', eventId) + CortexSrv.getResponders('alert', event.id) .then(function(responders) { self.responders = responders; + return CortexSrv.promntForResponder(responders); }) - .catch(function(err) { - NotificationSrv.error('AlertList', err.data, err.status); - }); - }; - - this.runResponder = function(responderId, responderName, event) { - CortexSrv.runResponder(responderId, responderName, 'alert', _.pick(event, 'id', 'tlp')) .then(function(response) { + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'alert', _.pick(event, 'id', 'tlp')); + } + }) + .then(function(response){ NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on alert', event.title].join(' '), 'success'); }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('CaseList', response.data, response.status); + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('AlertList', err.data, err.status); } }); }; diff --git a/ui/app/scripts/controllers/case/CaseDetailsCtrl.js b/ui/app/scripts/controllers/case/CaseDetailsCtrl.js index 8573a3a07a..cca226dbbd 100644 --- a/ui/app/scripts/controllers/case/CaseDetailsCtrl.js +++ b/ui/app/scripts/controllers/case/CaseDetailsCtrl.js @@ -25,12 +25,23 @@ '_parent': 
{ '_type': 'case_task', '_query': { - '_parent': { - '_type': 'case', - '_query': { - '_id': $scope.caseId + _and: [ + { + '_parent': { + '_type': 'case', + '_query': { + '_id': $scope.caseId + } + } + }, + { + _not: { + status: 'Cancel' + } } - } + ] + + } } } @@ -95,7 +106,7 @@ $scope.openAttachment = function(attachment) { $state.go('app.case.tasks-item', { caseId: $scope.caze.id, - itemId: attachment.case_task.id + itemId: attachment.case_task ? attachment.case_task.id : attachment._parent }); }; diff --git a/ui/app/scripts/controllers/case/CaseListCtrl.js b/ui/app/scripts/controllers/case/CaseListCtrl.js index 933bb964f0..7e7fd55f68 100644 --- a/ui/app/scripts/controllers/case/CaseListCtrl.js +++ b/ui/app/scripts/controllers/case/CaseListCtrl.js @@ -235,19 +235,32 @@ this.uiSrv.setSort(sort); }; - this.getCaseResponders = function(caseId, force) { + this.getCaseResponders = function(caze, force) { if(!force && this.caseResponders !== null) { return; } this.caseResponders = null; - CortexSrv.getResponders('case', caseId) - .then(function(responders) { - self.caseResponders = responders; - }) - .catch(function(err) { - NotificationSrv.error('CaseList', err.data, err.status); - }); + CortexSrv.getResponders('case', caze.id) + .then(function(responders) { + self.caseResponders = responders; + return CortexSrv.promntForResponder(responders); + }) + .then(function(response) { + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'case', _.pick(caze, 'id', 'tlp', 'pap')); + } + }) + .then(function(response){ + NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on case', caze.title].join(' '), 'success'); + }) + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('CaseList', err.data, err.status); + } + }); }; this.runResponder = function(responderId, responderName, caze) { diff --git a/ui/app/scripts/controllers/case/CaseMainCtrl.js b/ui/app/scripts/controllers/case/CaseMainCtrl.js index 07fa4844fc..b5fe73a224 100644 --- a/ui/app/scripts/controllers/case/CaseMainCtrl.js +++ b/ui/app/scripts/controllers/case/CaseMainCtrl.js @@ -332,24 +332,25 @@ $scope.caseResponders = null; CortexSrv.getResponders('case', $scope.caseId) - .then(function(responders) { - $scope.caseResponders = responders; - }) - .catch(function(response) { - NotificationSrv.error('caseDetails', response.data, response.status); - }); - }; - - $scope.runResponder = function(responderId, responderName) { - CortexSrv.runResponder(responderId, responderName, 'case', _.pick($scope.caze, 'id', 'tlp', 'pap')) - .then(function(response) { - NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on case', $scope.caze.title].join(' '), 'success'); - }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('caseDetails', response.data, response.status); - } - }); + .then(function(responders){ + $scope.caseResponders = responders; + return CortexSrv.promntForResponder(responders); + }) + .then(function(response) { + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'case', _.pick($scope.caze, 'id', 'tlp', 'pap')); + } + }) + .then(function(response){ + NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on case', $scope.caze.title].join(' '), 'success'); + }) + 
.catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('caseDetails', err.data, err.status); + } + }); }; /** diff --git a/ui/app/scripts/controllers/case/CaseObservablesCtrl.js b/ui/app/scripts/controllers/case/CaseObservablesCtrl.js index 8ae2158deb..b7cdd7c4b1 100644 --- a/ui/app/scripts/controllers/case/CaseObservablesCtrl.js +++ b/ui/app/scripts/controllers/case/CaseObservablesCtrl.js @@ -674,7 +674,7 @@ size: 'max', resolve: { report: function() { - return report + return report; }, observable: function() { return observable; @@ -682,35 +682,36 @@ } }); }) - .catch(function(err) { + .catch(function(/*err*/) { NotificationSrv.error('Unable to fetch the analysis report'); }); }; - $scope.getObsResponders = function(observableId, force) { + $scope.getObsResponders = function(observable, force) { if(!force && $scope.obsResponders !== null) { return; } $scope.obsResponders = null; - CortexSrv.getResponders('case_artifact', observableId) + CortexSrv.getResponders('case_artifact', observable.id) .then(function(responders) { $scope.obsResponders = responders; + return CortexSrv.promntForResponder(responders); }) - .catch(function(err) { - NotificationSrv.error('observablesList', err.data, err.status); - }); - }; - - $scope.runResponder = function(responderId, responderName, artifact) { - CortexSrv.runResponder(responderId, responderName, 'case_artifact', _.pick(artifact, 'id')) .then(function(response) { - var data = '['+$filter('fang')(artifact.data || artifact.attachment.name)+']'; + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'case_artifact', _.pick(observable, 'id')); + } + }) + .then(function(response){ + var data = '['+$filter('fang')(observable.data || observable.attachment.name)+']'; NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on observable', data].join(' '), 'success'); }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('observablesList', response.data, response.status); + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('observablesList', err.data, err.status); } }); }; diff --git a/ui/app/scripts/controllers/case/CaseObservablesItemCtrl.js b/ui/app/scripts/controllers/case/CaseObservablesItemCtrl.js index 589ff7328d..245cbbddc0 100644 --- a/ui/app/scripts/controllers/case/CaseObservablesItemCtrl.js +++ b/ui/app/scripts/controllers/case/CaseObservablesItemCtrl.js @@ -240,33 +240,34 @@ }); }; - $scope.getObsResponders = function(observableId, force) { + $scope.getObsResponders = function(observable, force) { if(!force && $scope.obsResponders !== null) { return; } $scope.obsResponders = null; - CortexSrv.getResponders('case_artifact', observableId) + CortexSrv.getResponders('case_artifact', observable.id) .then(function(responders) { $scope.obsResponders = responders; + return CortexSrv.promntForResponder(responders); }) - .catch(function(err) { - NotificationSrv.error('observablesList', err.data, err.status); - }); - }; - - $scope.runResponder = function(responderId, responderName, artifact) { - CortexSrv.runResponder(responderId, responderName, 'case_artifact', _.pick(artifact, 'id')) .then(function(response) { - var data = '['+$filter('fang')(artifact.data || artifact.attachment.name)+']'; + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, 
response.name, 'case_artifact', _.pick(observable, 'id')); + } + }) + .then(function(response){ + var data = '['+$filter('fang')(observable.data || observable.attachment.name)+']'; NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on observable', data].join(' '), 'success'); }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('observablesList', response.data, response.status); + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('Observable Details', err.data, err.status); } }); - }; + }; $scope.getTags = function(query) { return TagSrv.fromObservables(query); diff --git a/ui/app/scripts/controllers/case/CaseTasksCtrl.js b/ui/app/scripts/controllers/case/CaseTasksCtrl.js index 82221fac2f..e060f43005 100755 --- a/ui/app/scripts/controllers/case/CaseTasksCtrl.js +++ b/ui/app/scripts/controllers/case/CaseTasksCtrl.js @@ -174,32 +174,33 @@ return defer.promise; }; - $scope.getTaskResponders = function(taskId, force) { + $scope.getTaskResponders = function(task, force) { if(!force && $scope.taskResponders !== null) { return; } $scope.taskResponders = null; - CortexSrv.getResponders('case_task', taskId) - .then(function(responders) { - $scope.taskResponders = responders; - }) - .catch(function(err) { - NotificationSrv.error('taskList', response.data, response.status); - }) - }; - - $scope.runResponder = function(responderId, responderName, task) { - CortexSrv.runResponder(responderId, responderName, 'case_task', _.pick(task, 'id')) - .then(function(response) { - NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on task', task.title].join(' '), 'success'); - }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('taskList', response.data, response.status); - } - }); - }; + CortexSrv.getResponders('case_task', task.id) + .then(function(responders) { + $scope.taskResponders = responders; + return CortexSrv.promntForResponder(responders); + }) + .then(function(response) { + if (response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'case_task', _.pick(task, 'id')); + } + }) + .then(function(response) { + NotificationSrv.success(['Responder', response.data.responderName, 'started successfully on task', task.title].join(' ')); + }) + .catch(function(err) { + if (err && !_.isString(err)) { + NotificationSrv.error('taskList', err.data, err.status); + } + }); + }; } function CaseTaskDeleteCtrl($uibModalInstance, title) { diff --git a/ui/app/scripts/controllers/case/CaseTasksItemCtrl.js b/ui/app/scripts/controllers/case/CaseTasksItemCtrl.js index 5139acf4ad..888d2755d3 100644 --- a/ui/app/scripts/controllers/case/CaseTasksItemCtrl.js +++ b/ui/app/scripts/controllers/case/CaseTasksItemCtrl.js @@ -216,20 +216,21 @@ CortexSrv.getResponders('case_task', $scope.task.id) .then(function(responders) { $scope.taskResponders = responders; + return CortexSrv.promntForResponder(responders); }) - .catch(function(err) { - NotificationSrv.error('taskDetails', response.data, response.status); - }) - }; - - $scope.runResponder = function(responderId, responderName) { - CortexSrv.runResponder(responderId, responderName, 'case_task', _.pick($scope.task, 'id')) .then(function(response) { + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 
'case_task', _.pick($scope.task, 'id')); + } + }) + .then(function(response){ NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on task', $scope.task.title].join(' '), 'success'); }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('taskDetails', response.data, response.status); + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('taskDetails', err.data, err.status); } }); }; @@ -242,7 +243,7 @@ }, function(response) { NotificationSrv.error('taskDetails', response.data, response.status); }); - } + }; // Add tabs CaseTabsSrv.addTab($scope.tabName, { diff --git a/ui/app/scripts/controllers/misc/ResponderSelectorCtrl.js b/ui/app/scripts/controllers/misc/ResponderSelectorCtrl.js new file mode 100644 index 0000000000..b1d3b0a93b --- /dev/null +++ b/ui/app/scripts/controllers/misc/ResponderSelectorCtrl.js @@ -0,0 +1,20 @@ +(function() { + 'use strict'; + + angular.module('theHiveControllers') + .controller('ResponderSelectorCtrl', function($uibModalInstance, responders) { + this.responders = responders || []; + this.selectAll = false; + this.state = { + filter: '' + }; + + this.next = function(responder) { + $uibModalInstance.close(responder); + }; + + this.cancel = function() { + $uibModalInstance.dismiss(); + }; + }); +})(); diff --git a/ui/app/scripts/directives/logEntry.js b/ui/app/scripts/directives/logEntry.js index ef779202b6..487d08e01e 100644 --- a/ui/app/scripts/directives/logEntry.js +++ b/ui/app/scripts/directives/logEntry.js @@ -8,28 +8,34 @@ $scope.showActions = false; $scope.actions = null; $scope.logResponders = null; - $scope.getLogResponders = function(logId) { + + $scope.getLogResponders = function(taskLog, force) { + if(!force && $scope.logResponders !== null) { + return; + } + $scope.logResponders = null; - CortexSrv.getResponders('case_task_log', logId) + CortexSrv.getResponders('case_task_log', taskLog.id) .then(function(responders) { $scope.logResponders = responders; + return CortexSrv.promntForResponder(responders); + }) + .then(function(response) { + if(response && _.isString(response)) { + NotificationSrv.log(response, 'warning'); + } else { + return CortexSrv.runResponder(response.id, response.name, 'case_task_log', _.pick(taskLog, 'id')); + } + }) + .then(function(response){ + NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on task log'].join(' '), 'success'); }) - .catch(function(response) { - NotificationSrv.error('logEntry', response.data, response.status); + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('logEntry', err.data, err.status); + } }); - }; - - $scope.runResponder = function(responderId, responderName, log) { - CortexSrv.runResponder(responderId, responderName, 'case_task_log', _.pick(log, 'id')) - .then(function(response) { - NotificationSrv.log(['Responder', response.data.responderName, 'started successfully on task log'].join(' '), 'success'); - }) - .catch(function(response) { - if(response && !_.isString(response)) { - NotificationSrv.error('logEntry', response.data, response.status); - } - }); - }; + }; $scope.getActions = function(logId) { $scope.actions = PSearchSrv(null, 'connector/cortex/action', { diff --git a/ui/app/scripts/directives/responder-actions.js b/ui/app/scripts/directives/responder-actions.js index e288bd2e1c..f5c9f31dd3 100644 --- a/ui/app/scripts/directives/responder-actions.js +++ b/ui/app/scripts/directives/responder-actions.js @@ -10,10 +10,16 @@ 
}, templateUrl: 'views/directives/responder-actions.html', controller: function($scope, $uibModal) { - _.each($scope.actions.values, function(action) { - if(action.status === 'Failure') { - action.errorMessage = (JSON.parse(action.report) || {}).errorMessage; + $scope.$watchCollection('actions.values', function(list) { + if(!list) { + return; } + + _.each(_.isArray(list) ? list : list.values, function(action) { + if (action.status === 'Failure') { + action.errorMessage = (JSON.parse(action.report) || {}).errorMessage; + } + }); }); $scope.showResponderJob = function(action) { diff --git a/ui/app/scripts/services/CortexSrv.js b/ui/app/scripts/services/CortexSrv.js index 8ab5bb9be4..4be528449b 100644 --- a/ui/app/scripts/services/CortexSrv.js +++ b/ui/app/scripts/services/CortexSrv.js @@ -96,6 +96,27 @@ return modalInstance.result; }; + this.promntForResponder = function(responders) { + if(!responders || responders.length ===0) { + return $q.resolve('No responders available'); + } + + var modalInstance = $uibModal.open({ + animation: 'true', + templateUrl: 'views/partials/misc/responder.selector.html', + controller: 'ResponderSelectorCtrl', + controllerAs: '$dialog', + size: 'lg', + resolve: { + responders: function() { + return responders; + } + } + }); + + return modalInstance.result; + }; + this.getResponders = function(type, id) { return $http.get(baseUrl + '/responder/' + type + '/' + id) .then(function(response) { diff --git a/ui/app/styles/main.css b/ui/app/styles/main.css index ffca7d5f1f..71172310ba 100644 --- a/ui/app/styles/main.css +++ b/ui/app/styles/main.css @@ -249,6 +249,10 @@ pre.clearpre { font-weight: normal !important; } +.label-critical { + background-color: #8a0000 !important; +} + .progress.progress-bar-sm { height: 10px; } diff --git a/ui/app/views/components/app-container.component.html b/ui/app/views/components/app-container.component.html index 0ddd75b816..5581592a29 100644 --- a/ui/app/views/components/app-container.component.html +++ b/ui/app/views/components/app-container.component.html @@ -26,7 +26,10 @@
- TheHive Project 2016-2020, AGPL-V3
+
+ TheHive Project 2016-2021,
+ AGPL-V3
+
diff --git a/ui/app/views/directives/log-entry.html b/ui/app/views/directives/log-entry.html
index 516fd1ee8f..f4b1d91abe 100644
--- a/ui/app/views/directives/log-entry.html
+++ b/ui/app/views/directives/log-entry.html
@@ -7,22 +7,9 @@
diff --git a/ui/app/views/partials/alert/list.html b/ui/app/views/partials/alert/list.html
index c59b296a6a..0604e01be7 100644
--- a/ui/app/views/partials/alert/list.html
+++ b/ui/app/views/partials/alert/list.html
@@ -5,7 +5,7 @@

List of alerts ({{$vm.list.total || 0}} of {{alertEvents.count}})

@@ -170,22 +170,9 @@

List of alerts ({{$vm.list.total || 0}} of {{alertEvents.c

diff --git a/ui/app/views/partials/case/case.list.html b/ui/app/views/partials/case/case.list.html
index 652263c438..3062b0898d 100644
--- a/ui/app/views/partials/case/case.list.html
+++ b/ui/app/views/partials/case/case.list.html
@@ -115,24 +115,9 @@

List of cases ({{$vm.list.total || 0}} of {{$vm.caseStats.
@@ -147,9 +132,6 @@

List of cases ({{$vm.list.total || 0}} of {{$vm.caseStats.
diff --git a/ui/app/views/partials/case/case.panelinfo.html b/ui/app/views/partials/case/case.panelinfo.html
index c7a15240bb..72175a8ae6 100644
--- a/ui/app/views/partials/case/case.panelinfo.html
+++ b/ui/app/views/partials/case/case.panelinfo.html
@@ -54,21 +54,7 @@

Responders
diff --git a/ui/app/views/partials/case/case.tasks.html b/ui/app/views/partials/case/case.tasks.html
index 635e3112b7..b8495ba303 100755
--- a/ui/app/views/partials/case/case.tasks.html
+++ b/ui/app/views/partials/case/case.tasks.html
@@ -133,24 +133,9 @@
@@ -241,24 +226,9 @@
diff --git a/ui/app/views/partials/case/case.tasks.item.html b/ui/app/views/partials/case/case.tasks.item.html
index cdc070c9c2..ca43c9e4e2 100644
--- a/ui/app/views/partials/case/case.tasks.item.html
+++ b/ui/app/views/partials/case/case.tasks.item.html
@@ -4,24 +4,10 @@

Responders
diff --git a/ui/app/views/partials/misc/responder.selector.html b/ui/app/views/partials/misc/responder.selector.html
new file mode 100644
index 0000000000..01d94cb0a5
--- /dev/null
+++ b/ui/app/views/partials/misc/responder.selector.html
@@ -0,0 +1,37 @@
diff --git a/ui/app/views/partials/observables/details/artifact-details-information.html b/ui/app/views/partials/observables/details/artifact-details-information.html
index 8a8a43487d..57e5192839 100644
--- a/ui/app/views/partials/observables/details/artifact-details-information.html
+++ b/ui/app/views/partials/observables/details/artifact-details-information.html
@@ -5,24 +5,10 @@

Metadata

diff --git a/ui/app/views/partials/observables/list/artifacts-list-main.html b/ui/app/views/partials/observables/list/artifacts-list-main.html
index 7ba681f28e..ebde4a6c3f 100644
--- a/ui/app/views/partials/observables/list/artifacts-list-main.html
+++ b/ui/app/views/partials/observables/list/artifacts-list-main.html
@@ -113,22 +113,9 @@

Observable List ({{artifacts.total || 0}} of {{artifactStats.count}})

diff --git a/ui/bower.json b/ui/bower.json
index 31087bea4f..623675c7a6 100644
--- a/ui/bower.json
+++ b/ui/bower.json
@@ -1,6 +1,6 @@
 {
   "name": "thehive",
-  "version": "3.5.0",
+  "version": "3.5.1",
   "license": "AGPL-3.0",
   "dependencies": {
     "jquery": "^3.4.1",
diff --git a/ui/package.json b/ui/package.json
index f9a4bf584b..36b1f2b43d 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "thehive",
-  "version": "3.5.0",
+  "version": "3.5.1",
   "license": "AGPL-3.0",
   "repository": {
     "type": "git",
diff --git a/version.sbt b/version.sbt
index a9b14317be..42efe7cae6 100644
--- a/version.sbt
+++ b/version.sbt
@@ -1 +1 @@
-version in ThisBuild := "3.5.0-1"
+version in ThisBuild := "3.5.1-1"