
Commit

Merge branch 'release/4.1.1' into main
To-om committed Mar 23, 2021
2 parents ce20ee3 + 2fa00e8 commit 7575943
Showing 20 changed files with 114 additions and 30 deletions.
19 changes: 19 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,24 @@
# Change Log

## [4.1.1](https://github.com/TheHive-Project/TheHive/milestone/70) (2021-03-23)

**Implemented enhancements:**

- [Feature Request] Include organisation ID in webhooks [\#1865](https://github.com/TheHive-Project/TheHive/issues/1865)

**Closed issues:**

- [Bug] Importing the ATT&CK library fails on 4.1 [\#1862](https://github.com/TheHive-Project/TheHive/issues/1862)
- Thehive4.1.0 Issues with Lucene [\#1863](https://github.com/TheHive-Project/TheHive/issues/1863)

**Fixed bugs:**

- [Bug] TheHive doesn't start if webhook is configured without authentication [\#1859](https://github.com/TheHive-Project/TheHive/issues/1859)
- [Bug] Migration fails from 4.0.5 to 4.1 [\#1861](https://github.com/TheHive-Project/TheHive/issues/1861)
- [Bug] Filter by "IMPORTED" does not work [\#1866](https://github.com/TheHive-Project/TheHive/issues/1866)
- [Bug] TheHive doesn't start in cluster mode (serializer is missing) [\#1868](https://github.com/TheHive-Project/TheHive/issues/1868)
- [Bug] Full-text search is slow [\#1870](https://github.com/TheHive-Project/TheHive/issues/1870)

## [4.1.0](https://github.com/TheHive-Project/TheHive/milestone/56) (2021-03-18)

**Implemented enhancements:**
2 changes: 1 addition & 1 deletion build.sbt
@@ -2,7 +2,7 @@ import Dependencies._
import com.typesafe.sbt.packager.Keys.bashScriptDefines
import org.thp.ghcl.Milestone

val thehiveVersion = "4.1.0-1"
val thehiveVersion = "4.1.1-1"
val scala212 = "2.12.13"
val scala213 = "2.13.1"
val supportedScalaVersions = List(scala212, scala213)
2 changes: 1 addition & 1 deletion frontend/bower.json
@@ -1,6 +1,6 @@
{
"name": "thehive",
"version": "4.1.0-1",
"version": "4.1.1-1",
"license": "AGPL-3.0",
"dependencies": {
"jquery": "^3.4.1",
2 changes: 1 addition & 1 deletion frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "thehive",
"version": "4.1.0-1",
"version": "4.1.1-1",
"license": "AGPL-3.0",
"repository": {
"type": "git",
22 changes: 22 additions & 0 deletions package/docker/entrypoint
@@ -1,6 +1,7 @@
#!/bin/bash
CQL_HOSTNAMES=${TH_CQL_HOSTNAMES:-cassandra}
BDB_DIRECTORY=${TH_BDB_DIRECTORY:-/data/db}
INDEX_DIRECTORY=${TH_INDEX_DIRECTORY:-/data/index}
HDFS_URL=${TH_HDFS_URL}
STORAGE_DIRECTORY=${TH_STORAGE_DIRECTORY:-/data/files}
test "${TH_NO_CONFIG_SECRET}" == 1
@@ -60,6 +61,7 @@ do
"--cql-username") shift; CQL_USERNAME=$1 ;;
"--cql-password") shift; CQL_PASSWORD=$1 ;;
"--bdb-directory") shift; BDB_DIRECTORY=$1 ;;
"--index-directory") shift; INDEX_DIRECTORY=$1 ;;
"--no-config-storage") CONFIG_STORAGE=0 ;;
"--hdfs-url") shift; HDFS_URL=$1 ;;
"--storage-directory") shift; STORAGE_DIRECTORY=$1 ;;
@@ -103,6 +105,12 @@ then
echo "storage.backend = berkeleyje" >> ${CONFIG_FILE}
echo "storage.directory = \"${BDB_DIRECTORY}\"" >> ${CONFIG_FILE}
echo "berkeleyje.freeDisk = 1" >> ${CONFIG_FILE}
if test -e "${BDB_DIRECTORY}"
then
test -w "${BDB_DIRECTORY}" || echo "WARNING the directory used to store database ($BDB_DIRECTORY) is not writable"
else
mkdir -p "${BDB_DIRECTORY}" || echo "WARNING the directory used to store database ($BDB_DIRECTORY) is not writable"
fi
else
echo "Using cassandra address = ${CQL[@]}"
echo "storage.backend = cql" >> ${CONFIG_FILE}
@@ -120,6 +128,14 @@ then
echo "Waiting until Cassandra DB is up"
sleep 30 # Sleep until cassandra Db is up
fi
echo "index.search.backend = lucene" >> ${CONFIG_FILE}
echo "index.search.directory = \"${INDEX_DIRECTORY}\"" >> ${CONFIG_FILE}
if test -e "${INDEX_DIRECTORY}"
then
test -w "${INDEX_DIRECTORY}" || echo "WARNING the directory used to store index ($INDEX_DIRECTORY) is not writable"
else
mkdir -p "${INDEX_DIRECTORY}" || echo "WARNING the directory used to store index ($INDEX_DIRECTORY) is not writable"
fi
echo "}" >> ${CONFIG_FILE}
fi

Inline comment from vxsh4d0w (Contributor), Mar 23, 2021:

thehive | /opt/thehive/entrypoint: line 141: syntax error near unexpected token `fi'
thehive | /opt/thehive/entrypoint: line 141: `fi'

@@ -140,6 +156,12 @@ then
mkdir -p "${STORAGE_DIRECTORY}"
echo "provider: localfs" >> ${CONFIG_FILE}
echo "localfs.directory: \"${STORAGE_DIRECTORY}\"" >> ${CONFIG_FILE}
if test -e "${STORAGE_DIRECTORY}"
then
test -w "${STORAGE_DIRECTORY}" || echo "WARNING the directory used to store files ($STORAGE_DIRECTORY) is not writable"
else
mkdir -p "${STORAGE_DIRECTORY}" || echo "WARNING the directory used to store files ($STORAGE_DIRECTORY) is not writable"
fi
fi
echo "}" >> ${CONFIG_FILE}
fi
@@ -51,7 +51,7 @@ class Properties @Inject() (
.property("_id", UMapping.entityId)(
_.select(_._id)
.filter[EntityId] {
case (_, t, _, Right(p)) => t.has(T.id, p.map(_.value))
case (_, t, _, Right(p)) => t.has(T.id, p.mapValue(_.value))
case (_, t, _, Left(true)) => t
case (_, t, _, _) => t.empty
}
2 changes: 1 addition & 1 deletion thehive/app/org/thp/thehive/models/Alert.scala
@@ -43,7 +43,7 @@ case class AlertTag()
@DefineIndex(IndexType.standard, "source")
@DefineIndex(IndexType.standard, "sourceRef")
@DefineIndex(IndexType.fulltext, "title")
@DefineIndex(IndexType.fulltext, "description")
@DefineIndex(IndexType.fulltextOnly, "description")
@DefineIndex(IndexType.standard, "severity")
@DefineIndex(IndexType.standard, "date")
@DefineIndex(IndexType.standard, "lastSyncDate")
4 changes: 2 additions & 2 deletions thehive/app/org/thp/thehive/models/Case.scala
@@ -83,15 +83,15 @@ case class CaseProcedure()
@BuildVertexEntity
@DefineIndex(IndexType.unique, "number")
@DefineIndex(IndexType.fulltext, "title")
@DefineIndex(IndexType.fulltext, "description")
@DefineIndex(IndexType.fulltextOnly, "description")
@DefineIndex(IndexType.standard, "severity")
@DefineIndex(IndexType.standard, "startDate")
@DefineIndex(IndexType.standard, "endDate")
@DefineIndex(IndexType.standard, "flag")
@DefineIndex(IndexType.standard, "tlp")
@DefineIndex(IndexType.standard, "pap")
@DefineIndex(IndexType.standard, "status")
@DefineIndex(IndexType.fulltext, "summary")
@DefineIndex(IndexType.fulltextOnly, "summary")
@DefineIndex(IndexType.standard, "tags")
@DefineIndex(IndexType.standard, "assignee")
@DefineIndex(IndexType.standard, "organisationIds")
2 changes: 1 addition & 1 deletion thehive/app/org/thp/thehive/models/Log.scala
@@ -8,7 +8,7 @@ import java.util.Date
@BuildEdgeEntity[Log, Attachment]
case class LogAttachment()

@DefineIndex(IndexType.fulltext, "message")
@DefineIndex(IndexType.fulltextOnly, "message")
@DefineIndex(IndexType.standard, "date")
@DefineIndex(IndexType.standard, "taskId")
@DefineIndex(IndexType.standard, "organisationIds")
2 changes: 1 addition & 1 deletion thehive/app/org/thp/thehive/models/Observable.scala
@@ -17,7 +17,7 @@ case class ObservableData()
@BuildEdgeEntity[Observable, Tag]
case class ObservableTag()

@DefineIndex(IndexType.fulltext, "message")
@DefineIndex(IndexType.fulltextOnly, "message")
@DefineIndex(IndexType.standard, "tlp")
@DefineIndex(IndexType.standard, "ioc")
@DefineIndex(IndexType.standard, "sighted")
3 changes: 2 additions & 1 deletion thehive/app/org/thp/thehive/models/Tag.scala
@@ -4,7 +4,8 @@ import org.thp.scalligraph.BuildVertexEntity
import org.thp.scalligraph.models.{DefineIndex, IndexType}

@DefineIndex(IndexType.unique, "namespace", "predicate", "value")
@DefineIndex(IndexType.fulltext, "namespace", "predicate", "value", "description")
@DefineIndex(IndexType.fulltext, "namespace", "predicate", "value")
@DefineIndex(IndexType.fulltextOnly, "description")
@BuildVertexEntity
case class Tag(
namespace: String,
2 changes: 1 addition & 1 deletion thehive/app/org/thp/thehive/models/Task.scala
@@ -21,7 +21,7 @@ case class TaskLog()
@BuildVertexEntity
@DefineIndex(IndexType.fulltext, "title")
@DefineIndex(IndexType.standard, "group")
@DefineIndex(IndexType.fulltext, "description")
@DefineIndex(IndexType.fulltextOnly, "description")
@DefineIndex(IndexType.standard, "status")
@DefineIndex(IndexType.standard, "flag")
@DefineIndex(IndexType.standard, "startDate")
21 changes: 21 additions & 0 deletions thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala
@@ -391,6 +391,27 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema {
traversal.outE("OrganisationDashboard").raw.property("writable", true).iterate()
Success(())
}
//=====[release 4.1.0]=====
.removeIndex("Alert", IndexType.fulltext, "description")
.removeIndex("Case", IndexType.fulltext, "description", "summary")
.removeIndex("Log", IndexType.fulltext, "message")
.removeIndex("Observable", IndexType.fulltext, "message")
.removeIndex("Log", IndexType.fulltext, "message")
.removeIndex("Tag", IndexType.fulltext, "description")
.removeIndex("Task", IndexType.fulltext, "description")
.updateGraph("Set caseId in imported alerts", "Alert") { traversal =>
traversal
.project(
_.by
.by(_.out("AlertCase")._id.option)
)
.foreach {
case (vertex, caseId) =>
caseId.foreach(vertex.property("caseId", _))
case _ =>
}
Success(())
}

val reflectionClasses = new Reflections(
new ConfigurationBuilder()
11 changes: 6 additions & 5 deletions thehive/app/org/thp/thehive/services/AlertSrv.scala
@@ -262,6 +262,7 @@ class AlertSrv @Inject() (
createdCase <- caseSrv.create(case0, assignee, organisation, customField, caseTemplate, Nil)
_ <- importObservables(alert.alert, createdCase.`case`)
_ <- alertCaseSrv.create(AlertCase(), alert.alert, createdCase.`case`)
_ <- get(alert.alert).update(_.caseId, Some(createdCase._id)).getOrFail("Alert")
_ <- markAsRead(alert._id)
_ = integrityCheckActor ! EntityAdded("Alert")
} yield createdCase
@@ -481,11 +482,11 @@ object AlertOps {
.value(_.`type`)
.headOption
.map {
case CustomFieldType.boolean => traversal.filter(_.customFields(customField).has(_.booleanValue, predicate.map(_.as[Boolean])))
case CustomFieldType.date => traversal.filter(_.customFields(customField).has(_.dateValue, predicate.map(_.as[Date])))
case CustomFieldType.float => traversal.filter(_.customFields(customField).has(_.floatValue, predicate.map(_.as[Double])))
case CustomFieldType.integer => traversal.filter(_.customFields(customField).has(_.integerValue, predicate.map(_.as[Int])))
case CustomFieldType.string => traversal.filter(_.customFields(customField).has(_.stringValue, predicate.map(_.as[String])))
case CustomFieldType.boolean => traversal.filter(_.customFields(customField).has(_.booleanValue, predicate.mapValue(_.as[Boolean])))
case CustomFieldType.date => traversal.filter(_.customFields(customField).has(_.dateValue, predicate.mapValue(_.as[Date])))
case CustomFieldType.float => traversal.filter(_.customFields(customField).has(_.floatValue, predicate.mapValue(_.as[Double])))
case CustomFieldType.integer => traversal.filter(_.customFields(customField).has(_.integerValue, predicate.mapValue(_.as[Int])))
case CustomFieldType.string => traversal.filter(_.customFields(customField).has(_.stringValue, predicate.mapValue(_.as[String])))
}
.getOrElse(traversal.empty)

10 changes: 5 additions & 5 deletions thehive/app/org/thp/thehive/services/CaseSrv.scala
@@ -507,11 +507,11 @@ object CaseOps {
.value(_.`type`)
.headOption
.map {
case CustomFieldType.boolean => traversal.filter(_.customFields(customField).has(_.booleanValue, predicate.map(_.as[Boolean])))
case CustomFieldType.date => traversal.filter(_.customFields(customField).has(_.dateValue, predicate.map(_.as[Date])))
case CustomFieldType.float => traversal.filter(_.customFields(customField).has(_.floatValue, predicate.map(_.as[Double])))
case CustomFieldType.integer => traversal.filter(_.customFields(customField).has(_.integerValue, predicate.map(_.as[Int])))
case CustomFieldType.string => traversal.filter(_.customFields(customField).has(_.stringValue, predicate.map(_.as[String])))
case CustomFieldType.boolean => traversal.filter(_.customFields(customField).has(_.booleanValue, predicate.mapValue(_.as[Boolean])))
case CustomFieldType.date => traversal.filter(_.customFields(customField).has(_.dateValue, predicate.mapValue(_.as[Date])))
case CustomFieldType.float => traversal.filter(_.customFields(customField).has(_.floatValue, predicate.mapValue(_.as[Double])))
case CustomFieldType.integer => traversal.filter(_.customFields(customField).has(_.integerValue, predicate.mapValue(_.as[Int])))
case CustomFieldType.string => traversal.filter(_.customFields(customField).has(_.stringValue, predicate.mapValue(_.as[String])))
}
.getOrElse(traversal.empty)

@@ -1,6 +1,7 @@
package org.thp.thehive.services

import akka.serialization.Serializer
import play.api.libs.json.{Json, OFormat}

import java.io.NotSerializableException

@@ -9,19 +10,30 @@ class IntegrityCheckSerializer extends Serializer {

override def includeManifest: Boolean = false

implicit val duplicationCheckResultFormat: OFormat[DuplicationCheckResult] = Json.format[DuplicationCheckResult]
implicit val globalCheckResultFormat: OFormat[GlobalCheckResult] = Json.format[GlobalCheckResult]

override def toBinary(o: AnyRef): Array[Byte] =
o match {
case EntityAdded(name) => 0.toByte +: name.getBytes
case NeedCheck(name) => 1.toByte +: name.getBytes
case DuplicationCheck(name) => 2.toByte +: name.getBytes
case _ => throw new NotSerializableException
case EntityAdded(name) => 0.toByte +: name.getBytes
case NeedCheck(name) => 1.toByte +: name.getBytes
case DuplicationCheck(name) => 2.toByte +: name.getBytes
case duplicationCheckResult: DuplicationCheckResult => 3.toByte +: Json.toJson(duplicationCheckResult).toString.getBytes
case GlobalCheckRequest(name) => 4.toByte +: name.getBytes
case globalCheckResult: GlobalCheckResult => 5.toByte +: Json.toJson(globalCheckResult).toString.getBytes
case GetCheckStats(name) => 6.toByte +: name.getBytes
case _ => throw new NotSerializableException
}

override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
bytes(0) match {
case 0 => EntityAdded(new String(bytes.tail))
case 1 => NeedCheck(new String(bytes.tail))
case 2 => DuplicationCheck(new String(bytes.tail))
case 3 => Json.parse(bytes.tail).as[DuplicationCheckResult]
case 4 => GlobalCheckRequest(new String(bytes.tail))
case 5 => Json.parse(bytes.tail).as[GlobalCheckResult]
case 6 => GetCheckStats(new String(bytes.tail))
case _ => throw new NotSerializableException
}
}
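
The new byte tags extend the existing name-prefix scheme, while the two result messages are carried as JSON via the implicit formats above. A minimal round-trip sketch (illustrative only; it assumes nothing beyond the message classes and signatures shown in this diff):

// EntityAdded("Alert") is encoded as 0.toByte followed by the bytes of "Alert",
// and fromBinary dispatches on that first byte to rebuild an equal message.
val serializer = new IntegrityCheckSerializer
val bytes      = serializer.toBinary(EntityAdded("Alert"))
val restored   = serializer.fromBinary(bytes, manifest = None)
assert(restored == EntityAdded("Alert"))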
@@ -2,7 +2,7 @@ package org.thp.thehive.services.notification.notifiers

import akka.stream.Materializer
import org.apache.tinkerpop.gremlin.structure.Vertex
import org.thp.client.{Authentication, ProxyWS, ProxyWSConfig}
import org.thp.client.{Authentication, NoAuthentication, ProxyWS, ProxyWSConfig}
import org.thp.scalligraph.models.{Entity, UMapping}
import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem}
import org.thp.scalligraph.traversal.TraversalOps._
@@ -31,7 +31,7 @@ case class WebhookNotification(
name: String,
url: String,
version: Int = 0,
auth: Authentication,
auth: Authentication = NoAuthentication,
wsConfig: ProxyWSConfig = ProxyWSConfig(),
includedTheHiveOrganisations: Seq[String] = Seq("*"),
excludedTheHiveOrganisations: Seq[String] = Nil
@@ -258,7 +258,11 @@ class Webhook(
else {
val ws = new ProxyWS(config.wsConfig, mat)
val async = for {
message <- Future.fromTry(buildMessage(config.version, audit))
message <- Future.fromTry(
buildMessage(config.version, audit).map(
_ + ("organisationId" -> JsString(organisation._id.toString)) + ("organisation" -> JsString(organisation.name))
)
)
_ = logger.debug(s"Request webhook with message $message")
resp <- config.auth(ws.url(config.url)).post(message)
} yield if (resp.status >= 400) logger.warn(s"Webhook call on ${config.url} returns ${resp.status} ${resp.statusText}") else ()
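
The enrichment above simply merges two extra fields into whatever JSON buildMessage produces, which is how the organisation ID requested in #1865 ends up in every webhook call. A minimal sketch of the resulting shape (the base fields and the example values are hypothetical; only organisationId and organisation come from this change):

import play.api.libs.json.{JsObject, JsString, Json}

// Hypothetical base message; the real fields produced by buildMessage are not shown in this diff.
val base: JsObject = Json.obj("operation" -> "update", "objectType" -> "case")

// What the webhook now posts: the base message plus the caller's organisation.
val enriched: JsObject =
  base +
    ("organisationId" -> JsString("~123456")) +  // organisation._id.toString (example value)
    ("organisation"   -> JsString("demo-org"))   // organisation.name (example value)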
4 changes: 4 additions & 0 deletions thehive/conf/play/reference-overrides.conf
@@ -9,6 +9,10 @@ play.filters {
]
}

play.http.parser.maxDiskBuffer = 128MB
play.http.parser.maxMemoryBuffer = 256kB


# Register module for dependency injection
play.modules.enabled += org.thp.thehive.TheHiveModule

@@ -70,7 +70,7 @@ class StatusCtrlTest extends PlaySpecification with TestAppBuilder {
"pollingDuration" -> 1000
),
"schemaStatus" -> Json.arr(
Json.obj("name" -> "thehive", "currentVersion" -> 59, "expectedVersion" -> 59, "error" -> JsNull)
Json.obj("name" -> "thehive", "currentVersion" -> 67, "expectedVersion" -> 67, "error" -> JsNull)
)
)

Expand Down

0 comments on commit 7575943

Please sign in to comment.