Merge pull request #267 from uber/sliver-dev
Added basic support for Sliver
MarcOverIP authored Jan 31, 2025
2 parents 0bac709 + cf8c942 commit 6daab54
Showing 7 changed files with 301 additions and 1 deletion.
13 changes: 13 additions & 0 deletions c2servers/cron.d/redelk_sliver
@@ -0,0 +1,13 @@
#
# Part of RedELK
# cron.d script for periodic actions related to RedELK and Sliver
#
# Author: hypnoticpattern
#

SHELL=/bin/sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

# Command to sync the logs from sliver to our scponly user's home directory
# m h dom mon dow user command
* * * * * root /usr/bin/rsync -rvx --append-verify --delete /root/.sliver/logs/audit.json /home/scponly/sliver/; /bin/chown -R scponly:scponly /home/scponly/sliver/*
17 changes: 17 additions & 0 deletions c2servers/filebeat/inputs.d/filebeat_sliver.yml
@@ -0,0 +1,17 @@
- type: log
  scan_frequency: 5s
  enabled: true
  fields_under_root: true
  json.keys_under_root: true
  json.add_error_key: true
  paths:
    - /root/.sliver/logs/audit.json
  fields:
    infra:
      attack_scenario: @@ATTACKSCENARIO@@
      log:
        type: rtops
    c2:
      program: sliver
      log:
        type: events
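
Note: because fields_under_root and json.keys_under_root are enabled, the static fields above and the keys of each audit.json line all end up at the root of the shipped event. An illustrative sketch of the resulting static fields (abridged; these are exactly the values the Logstash filter added in this commit matches on):

  "infra": { "attack_scenario": "@@ATTACKSCENARIO@@", "log": { "type": "rtops" } },
  "c2":    { "program": "sliver", "log": { "type": "events" } }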
@@ -206,7 +206,7 @@ fi
echo "" >>$LOGFILE

echo "[*] Fixing cron file permissions" | tee -a $LOGFILE
chown root:root /etc/cron.d/redelk >>$LOGFILE 2>&1
chown root:root /etc/cron.d/redelk && chmod 644 /etc/cron.d/redelk >> $LOGFILE 2>&1
ERROR=$?
if [ $ERROR -ne 0 ]; then
echo "[X] Could not fix cron file permissions (Error Code: $ERROR)."
@@ -82,6 +82,7 @@
enrich = {
    "enrich_csbeacon": {"enabled": True, "interval": 300},
    "enrich_stage1": {"enabled": True, "interval": 300},
    "enrich_sliver": {"enabled": True, "interval": 300},
    "enrich_greynoise": {
        "enabled": True,
        "interval": 310,
@@ -0,0 +1,96 @@
#!/usr/bin/python3
"""
Part of RedELK
This script enriches rtops lines with data from initial Sliver implant
Authors:
- Outflank B.V. / Mark Bergman (@xychix)
- Lorenzo Bernardi (@fastlorenzo)
- hypnoticpattern
"""

import logging
import traceback

from modules.helpers import es, get_initial_alarm_result, get_query, get_value

info = {
    'version': 0.1,
    'name': 'Enrich Sliver implant data',
    'alarmmsg': '',
    'description': 'This script enriches rtops lines with data from initial Sliver session',
    'type': 'redelk_enrich',
    'submodule': 'enrich_sliver'
}


class Module():
    """ enrich sliver module """
    def __init__(self):
        self.logger = logging.getLogger(info['submodule'])

    def run(self):
        """ run the enrich module """
        ret = get_initial_alarm_result()
        ret['info'] = info
        hits = self.enrich_sliver_data()
        ret['hits']['hits'] = hits
        ret['hits']['total'] = len(hits)
        self.logger.info('finished running module. result: %s hits', ret['hits']['total'])
        return ret

    def enrich_sliver_data(self):
        """ Get all lines in rtops that have not been enriched yet (for Sliver) """
        es_query = f'implant.id:* AND c2.program: sliver AND NOT c2.log.type:implant_newsession AND NOT tags:{info["submodule"]}'
        not_enriched_results = get_query(es_query, size=10000, index='rtops-*')

        # Create a dict grouped by implant ID
        implant_ids = {}
        for not_enriched in not_enriched_results:
            implant_id = get_value('_source.implant.id', not_enriched)
            if implant_id in implant_ids:
                implant_ids[implant_id].append(not_enriched)
            else:
                implant_ids[implant_id] = [not_enriched]

        hits = []
        # For each implant ID, get the initial session line
        for implant_id, implant_val in implant_ids.items():
            initial_sliver_session_doc = self.get_initial_sliver_session_doc(implant_id)

            # If no initial session line is found, skip this implant ID
            if not initial_sliver_session_doc:
                continue

            for doc in implant_val:
                # Fields to copy: host.*, implant.*, process.*, user.*
                res = self.copy_data_fields(initial_sliver_session_doc, doc, ['host', 'implant', 'user', 'process'])
                if res:
                    hits.append(res)

        return hits

    def get_initial_sliver_session_doc(self, implant_id):
        """ Get the initial implant document from Sliver or return False if none found """
        query = f'implant.id:{implant_id} AND c2.program: sliver AND c2.log.type:implant_newsession'
        initial_sliversession_doc = get_query(query, size=1, index='rtops-*')
        initial_sliversession_doc = initial_sliversession_doc[0] if len(initial_sliversession_doc) > 0 else False
        self.logger.debug('Initial sliver session line [%s]: %s', implant_id, initial_sliversession_doc)
        return initial_sliversession_doc

    def copy_data_fields(self, src, dst, fields):
        """ Copy all data of [fields] from src to dst document and save it to ES """
        for field in fields:
            if field in dst['_source']:
                self.logger.info('Field [%s] already exists in destination document, it will be overwritten', field)
            dst['_source'][field] = src['_source'][field]

        try:
            es.update(index=dst['_index'], id=dst['_id'], body={'doc': dst['_source']})
            return dst
        # pylint: disable=broad-except
        except Exception as error:
            stack_trace = traceback.format_exc()
            self.logger.error('Error enriching sliver session document %s: %s', dst['_id'], stack_trace)
            self.logger.exception(error)
            return False
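
For illustration, a rough sketch of the enrichment performed above (documents and values are hypothetical; the copied groups are the host, implant, user and process fields listed in the code): every later rtops document of an implant inherits the data recorded on that implant's implant_newsession document.

Initial implant_newsession document (_source, abridged):
  {"implant": {"id": "HYPOTHETICAL-ID"}, "host": {"name": "WORKSTATION01"}, "user": {"name": "jdoe"}, "process": {"pid": 4242, "name": "implant.exe"}}

Later rtops document for the same implant.id, after es.update():
  {"c2": {"program": "sliver", "log": {"type": "c2_command"}}, "implant": {"id": "HYPOTHETICAL-ID"}, "host": {"name": "WORKSTATION01"}, "user": {"name": "jdoe"}, "process": {"pid": 4242, "name": "implant.exe"}}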
@@ -0,0 +1,169 @@
# Part of RedELK
#
# In this file we configure the logstash filters for Sliver logs
#
# Author: hypnoticpattern
#

filter {
  if [infra][log][type] == "rtops" and [c2][program] == "sliver" {
    if [c2][log][type] == "events" {

      # Drop the periodic "GetSessions", "GetBeacons" and "GetVersion" commands run against the C2 server
      if "info" in [level] and "GetSessions" in [msg] {
        drop {}
      }

      if "info" in [level] and "GetBeacons" in [msg] {
        drop {}
      }

      if "info" in [level] and "GetVersion" in [msg] {
        drop {}
      }

      # Remove the base64-encoded payload from the Upload command
      if "Upload" in [msg] and "Data" in [msg] {
        mutate {
          gsub => [ "[msg]", '\\"Data\\":\\"[a-zA-Z0-9+\/]+={0,2}\\",', "" ]
        }
      }

      json {
        source => "msg"
      }

      # Remove base64 encoded payload from the Upload command
      if [msg][Upload] {
        mutate {
          remove_field => ["time"]
        }
      }

      date {
        match => [ "[time]", "ISO8601" ]
        target => "@timestamp"
        timezone => "Etc/UTC"
      }

      mutate {
        copy => { "@timestamp" => "[c2][timestamp]" }
        remove_field => ["time"]
      }

      mutate {
        rename => { "[msg]" => "[c2][message]" }
      }

      if [method] {
        mutate {
          replace => { "[c2][log][type]" => "c2_command" }
          gsub => [ "[method]", "/rpcpb\.SliverRPC/", "" ]
        }
      }

      mutate {
        rename => { "[method]" => "[c2][command][name]" }
        rename => { "[request]" => "[c2][command][arguments]" }
        rename => { "[session]" => "[c2][implant]" }
      }

      json {
        source => "[c2][command][arguments]"
        target => "[c2][command][arguments]"
      }

      json {
        source => "[c2][implant]"
        target => "[c2][implant]"
      }

      if [c2][command][arguments][Request][SessionID] {
        mutate {
          rename => { "[c2][command][arguments][Request][SessionID]" => "[implant][id]" }
        }
      }

      if [c2][implant][Hostname] {
        mutate {
          copy => { "[c2][implant][Hostname]" => "[host][name]" }
        }
      }

      if [c2][implant][Username] {
        mutate {
          copy => { "[c2][implant][Username]" => "[user][name]" }
        }
      }

      # Logstash errors out converting TunnelID to long, so force a cast to string
      if [c2][command][arguments][TunnelID] {
        mutate {
          convert => { "[c2][command][arguments][TunnelID]" => "string" }
        }
      }

      if [c2][command][name] == "LootAdd" {
        if [c2][command][arguments][Credential] {
          mutate {
            replace => { "[c2][log][type]" => "credentials" }
          }

          # User/Password
          if [c2][command][arguments][CredentialType] == 1 {
            mutate {
              copy => { "[c2][command][arguments][Credential][User]" => "[creds][username]" }
              copy => { "[c2][command][arguments][Credential][Password]" => "[creds][credential]" }
            }
          }

          # API Key (assumed to be CredentialType 2)
          if [c2][command][arguments][CredentialType] == 2 {
            mutate {
              copy => { "[c2][command][arguments][Credential][Name]" => "[creds][username]" }
              copy => { "[c2][command][arguments][Credential][APIKey]" => "[creds][credential]" }
            }
          }
        }
      }

      # Handle new Sliver session
      if [level] == "warning" and [Session] {

        grok {
          match => { "[Session][RemoteAddress]" => "%{IP:[host][ip_ext]}:%{POSINT}" }
        }

        if [Session][ActiveC2] {
          mutate {
            convert => { "[Session][ActiveC2]" => "string" }
          }
        }

        mutate {
          replace => { "[c2][log][type]" => "implant_newsession" }
          rename => { "[Session][OS]" => "[host][os][family]" }
          rename => { "[Session][Hostname]" => "[host][name]" }
          rename => { "[Session][ID]" => "[implant][id]" }
          rename => { "[Session][LastCheckin]" => "[implant][checkin]" }
          rename => { "[Session][Name]" => "[implant][name]" }
          rename => { "[Session][Username]" => "[user][name]" }
          rename => { "[Session][PID]" => "[process][pid]" }
          rename => { "[Session][Filename]" => "[process][name]" }
          rename => { "[Session][Transport]" => "[c2][listener][type]" }
          rename => { "[Session][ActiveC2]" => "[implant][url]" }
        }

        date {
          match => [ "[implant][checkin]", "UNIX" ]
          target => "[implant][checkin]"
          timezone => "Etc/UTC"
        }

      }
    }
  }
}
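
For orientation, the two kinds of audit.json lines this filter distinguishes, sketched with hypothetical values (only keys the filter references are shown; real Sliver audit lines may carry more fields):

An RPC call, logged at level info, which becomes a c2_command document:
  {"level":"info","time":"2025-01-31T12:00:00Z","msg":"{\"method\":\"/rpcpb.SliverRPC/Execute\",\"request\":\"{...}\",\"session\":\"{...}\"}"}

A new session, logged at level warning, which becomes an implant_newsession document:
  {"level":"warning","time":"2025-01-31T12:00:05Z","msg":"{\"Session\":{\"ID\":\"HYPOTHETICAL-ID\",\"Hostname\":\"WORKSTATION01\",\"Username\":\"jdoe\",\"OS\":\"windows\",\"PID\":4242,\"Filename\":\"implant.exe\",\"Transport\":\"mtls\",\"RemoteAddress\":\"198.51.100.7:49210\",\"ActiveC2\":\"mtls://198.51.100.7:8888\",\"LastCheckin\":1738324805}}"}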
4 changes: 4 additions & 0 deletions elkserver/mounts/redelk-config/etc/redelk/config.json.example
@@ -72,6 +72,10 @@
    "enabled": true,
    "interval": 300
  },
  "enrich_sliver": {
    "enabled": true,
    "interval": 300
  },
  "enrich_greynoise": {
    "enabled": true,
    "interval": 310,
