From 846789cad6863c19284848eca4d27e8c00fb2232 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sun, 7 Jun 2020 20:58:40 +0530 Subject: [PATCH 01/31] Add docstrings to some functions --- tanner/api/api.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tanner/api/api.py b/tanner/api/api.py index 2908f0c4..baea1001 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -10,6 +10,12 @@ def __init__(self, redis_client): self.redis_client = redis_client async def return_snares(self): + """Returns a list of all the snares that are + connected to the tanner. + + Returns: + [list] -- List containing UUID of all snares + """ query_res = [] try: query_res = await self.redis_client.smembers('snare_ids', encoding='utf-8') @@ -18,6 +24,14 @@ async def return_snares(self): return list(query_res) async def return_snare_stats(self, snare_uuid): + """Returns the stats of the given snare + + Arguments: + snare_uuid {uuid} -- UUID of snare + + Returns: + [dict] -- Dictionary containing all stats snare. + """ result = {} result['total_sessions'] = 0 result['total_duration'] = 0 @@ -41,6 +55,12 @@ async def return_snare_stats(self, snare_uuid): return result async def return_snare_info(self, uuid, count=-1): + """Returns JSON data that contains information about + all the sessions a single snare instance have. + + Returns: + uuid [string] - Snare UUID + """ query_res = [] try: query_res = await self.redis_client.zrevrangebyscore( From 0654fc0e2452f61c9b7fb1ee52b32bac7cad2d37 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sun, 7 Jun 2020 20:59:59 +0530 Subject: [PATCH 02/31] update API to take out sessions from postgres --- tanner/api/api.py | 127 +++++++++++++++++++++++++++++++++++-------- tanner/api/server.py | 9 ++- 2 files changed, 111 insertions(+), 25 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index baea1001..ff1d5c6e 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -2,12 +2,16 @@ import logging import operator import aioredis +import psycopg2 +from collections import ChainMap +from tanner.utils.attack_type import AttackType class Api: - def __init__(self, redis_client): + def __init__(self, redis_client, pg_client): self.logger = logging.getLogger('tanner.api.Api') self.redis_client = redis_client + self.pg_client = pg_client async def return_snares(self): """Returns a list of all the snares that are @@ -17,11 +21,15 @@ async def return_snares(self): [list] -- List containing UUID of all snares """ query_res = [] - try: - query_res = await self.redis_client.smembers('snare_ids', encoding='utf-8') - except aioredis.ProtocolError as connection_error: - self.logger.exception('Can not connect to redis %s', connection_error) - return list(query_res) + async with self.pg_client.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute("SELECT DISTINCT sensor_id FROM sessions") + ans = await cur.fetchall() + for r in ans: + query_res.append(str(r[0])) + cur.close() + conn.close() + return query_res async def return_snare_stats(self, snare_uuid): """Returns the stats of the given snare @@ -48,13 +56,13 @@ async def return_snare_stats(self, snare_uuid): result['total_sessions'] = len(sessions) for sess in sessions: result['total_duration'] += sess['end_time'] - sess['start_time'] - for attack in sess['attack_types']: - if attack in result['attack_frequency']: - result['attack_frequency'][attack] += 1 + for path in sess['paths']: + if path['attack_type'] in result['attack_frequency']: + result['attack_frequency'][path['attack_type']] += 1 return result - 
async def return_snare_info(self, uuid, count=-1): + async def return_snare_info(self, uuid): """Returns JSON data that contains information about all the sessions a single snare instance have. @@ -62,17 +70,93 @@ async def return_snare_info(self, uuid, count=-1): uuid [string] - Snare UUID """ query_res = [] - try: - query_res = await self.redis_client.zrevrangebyscore( - uuid, offset=0, count=count, encoding='utf-8' - ) - except aioredis.ProtocolError as connection_error: - self.logger.exception('Can not connect to redis %s', connection_error) - else: - if not query_res: - return 'Invalid SNARE UUID' - for (i, val) in enumerate(query_res): - query_res[i] = json.loads(val) + async with self.pg_client.acquire() as conn: + async with conn.cursor() as cur: + await cur.execute( + """ + SELECT * FROM sessions + WHERE + sessions.sensor_id = '%s' + """ % (uuid) + ) + session = await cur.fetchall() + for r in session: + sess = { + 'sess_uuid': str(r[0]), + 'snare_uuid': str(r[1]), + 'ip': r[2], + 'port': r[3], + 'location': { + 'country': r[4], + 'country_code': r[5], + 'city': r[6], + 'zip_code': r[7], + }, + 'user_agent': r[8], + 'start_time': r[9].timestamp(), + 'end_time': r[10].timestamp(), + 'request_per_second': r[11], + 'approx_time_between_requests': r[12], + 'accepted_paths': r[13], + 'errors': r[14], + 'hidden_links': r[15], + 'referrer': r[16] + } + + await cur.execute( + """ + SELECT * FROM cookies WHERE cookies.session_id = '%s' + """ % (str(r[0])) + ) + cookies = await cur.fetchall() + + all_cookies = [] + for r in cookies: + all_cookies.append( + { + r[1]: r[2] + } + ) + sess['cookies'] = dict(ChainMap(*all_cookies)) + + await cur.execute( + """ + SELECT * FROM paths WHERE paths.session_id = '%s' + """ % (str(r[0])) + ) + paths = await cur.fetchall() + all_paths = [] + + for p in paths: + all_paths.append( + { + "path": p[1], + "timestamp": p[2].timestamp(), + "response_status": p[3], + "attack_type": AttackType(p[4]).name + } + ) + sess['paths'] = dict(ChainMap(*all_paths)) + + await cur.execute( + """ + SELECT * FROM owners WHERE owners.session_id = '%s' + """ % (str(r[0])) + ) + + owners = await cur.fetchall() + owner_type = [] + + for o in owners: + owner_type.append( + { + o[1]: o[2] + } + ) + sess['owners'] = dict(ChainMap(*owner_type)) + query_res.append(sess) + cur.close() + conn.close() return query_res async def return_session_info(self, sess_uuid, snare_uuid=None): @@ -109,7 +193,6 @@ async def return_sessions(self, filters): if match_count == len(filters): matching_sessions.append(sess) - return matching_sessions async def return_latest_session(self): diff --git a/tanner/api/server.py b/tanner/api/server.py index ccc62bad..df0d0433 100644 --- a/tanner/api/server.py +++ b/tanner/api/server.py @@ -5,7 +5,7 @@ from aiohttp.web import middleware from tanner.api import api -from tanner import redis_client +from tanner import redis_client, postgres_client from tanner.config import TannerConfig from tanner.utils.api_key_generator import generate @@ -38,7 +38,7 @@ async def handle_snares(self, request): async def handle_snare_info(self, request): snare_uuid = request.match_info['snare_uuid'] - result = await self.api.return_snare_info(snare_uuid, 50) + result = await self.api.return_snare_info(snare_uuid) response_msg = self._make_response(result) return web.json_response(response_msg) @@ -109,7 +109,10 @@ def create_app(self, loop, auth=False): def start(self): loop = asyncio.get_event_loop() self.redis_client = 
loop.run_until_complete(redis_client.RedisClient.get_redis_client(poolsize=20)) - self.api = api.Api(self.redis_client) + self.pg_client = loop.run_until_complete( + postgres_client.PostgresClient().get_pg_client() + ) + self.api = api.Api(self.redis_client, self.pg_client) set_auth = TannerConfig.get('API', 'auth') app = self.create_app(loop, set_auth) host = TannerConfig.get('API', 'host') From d784260a90e7e53229c7fab5f636011aa8bf5095 Mon Sep 17 00:00:00 2001 From: mzfr Date: Tue, 9 Jun 2020 20:37:58 +0530 Subject: [PATCH 03/31] Fix the docstring --- tanner/api/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index ff1d5c6e..48155df7 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -66,7 +66,7 @@ async def return_snare_info(self, uuid): """Returns JSON data that contains information about all the sessions a single snare instance have. - Returns: + Arguments: uuid [string] - Snare UUID """ query_res = [] From 714aef113cc81a02fc41e19543cdebd17dcffd5a Mon Sep 17 00:00:00 2001 From: mzfr Date: Wed, 10 Jun 2020 14:27:25 +0530 Subject: [PATCH 04/31] Fetch 200 rows at a time This is done in case there are large number of sessions for a snare --- tanner/api/api.py | 168 ++++++++++++++++++++++++++-------------------- 1 file changed, 95 insertions(+), 73 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 48155df7..3b553dd6 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -79,82 +79,104 @@ async def return_snare_info(self, uuid): sessions.sensor_id = '%s' """ % (uuid) ) - session = await cur.fetchall() - for r in session: - sess = { - 'sess_uuid': str(r[0]), - 'snare_uuid': str(r[1]), - 'ip': r[2], - 'port': r[3], - 'location': { - 'country': r[4], - 'country_code': r[5], - 'city': r[6], - 'zip_code': r[7], - }, - 'user_agent': r[8], - 'start_time': r[9].timestamp(), - 'end_time': r[10].timestamp(), - 'request_per_second': r[11], - 'approx_time_between_requests': r[12], - 'accepted_paths': r[13], - 'errors': r[14], - 'hidden_links': r[15], - 'referrer': r[16] - } - - await cur.execute( - """ - SELECT * FROM cookies WHERE cookies.session_id = '%s' - """ % (str(r[0])) - ) - cookies = await cur.fetchall() - - all_cookies = [] - for r in cookies: - all_cookies.append( - { - r[1]: r[2] - } + while True: + session = await cur.fetchmany(size=200) + + if not session: + break + + for r in session: + sess = { + 'sess_uuid': str(r[0]), + 'snare_uuid': str(r[1]), + 'ip': r[2], + 'port': r[3], + 'location': { + 'country': r[4], + 'country_code': r[5], + 'city': r[6], + 'zip_code': r[7], + }, + 'user_agent': r[8], + 'start_time': r[9].timestamp(), + 'end_time': r[10].timestamp(), + 'request_per_second': r[11], + 'approx_time_between_requests': r[12], + 'accepted_paths': r[13], + 'errors': r[14], + 'hidden_links': r[15], + 'referrer': r[16] + } + + #Extracting all cookies + await cur.execute( + """ + SELECT * FROM cookies WHERE cookies.session_id = '%s' + """ % (str(r[0])) ) - sess['cookies'] = dict(ChainMap(*all_cookies)) - - await cur.execute( - """ - SELECT * FROM paths WHERE paths.session_id = '%s' - """ % (str(r[0])) - ) - paths = await cur.fetchall() - all_paths = [] - - for p in paths: - all_paths.append( - { - "path": p[1], - "timestamp": p[2].timestamp(), - "response_status": p[3], - "attack_type": AttackType(p[4]).name - } + + while True: + cookies = await cur.fetchmany(size=200) + + if not cookies: + break + + all_cookies = [] + for r in cookies: + all_cookies.append( + { + r[1]: r[2] + } + ) + 
sess['cookies'] = dict(ChainMap(*all_cookies)) + + #Extracting all paths + await cur.execute( + """ + SELECT * FROM paths WHERE paths.session_id = '%s' + """ % (str(r[0])) ) - sess['paths'] = dict(ChainMap(*all_paths)) - - await cur.execute( - """ - SELECT * FROM owners WHERE owners.session_id = '%s' - """ % (str(r[0])) - ) - - owners = await cur.fetchall() - owner_type = [] - - for o in owners: - owner_type.append( - { - o[1]: o[2] - } + + while True: + paths = await cur.fetchmany(size=200) + + if not paths: + break + all_paths = [] + + for p in paths: + all_paths.append( + { + "path": p[1], + "timestamp": p[2].timestamp(), + "response_status": p[3], + "attack_type": AttackType(p[4]).name + } + ) + sess['paths'] = all_paths + + # Extracting all owners + await cur.execute( + """ + SELECT * FROM owners WHERE owners.session_id = '%s' + """ % (str(r[0])) ) - sess['owners'] = dict(ChainMap(*owner_type)) - query_res.append(sess) + + while True: + owners = await cur.fetchmany(size=200) + + if not owners: + break + owner_type = [] + + for o in owners: + owner_type.append( + { + o[1]: o[2] + } + ) + sess['owners'] = dict(ChainMap(*owner_type)) + query_res.append(sess) cur.close() conn.close() return query_res From 2906221f5d78b217409a287c3f1b9822cae45f81 Mon Sep 17 00:00:00 2001 From: mzfr Date: Wed, 10 Jun 2020 14:57:03 +0530 Subject: [PATCH 05/31] Handle Invalid SNARE uuid --- tanner/api/api.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 3b553dd6..20f7f5ec 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -3,6 +3,8 @@ import operator import aioredis import psycopg2 +from asyncio import TimeoutError +from uuid import UUID from collections import ChainMap from tanner.utils.attack_type import AttackType @@ -69,9 +71,13 @@ async def return_snare_info(self, uuid): Arguments: uuid [string] - Snare UUID """ - query_res = [] - async with self.pg_client.acquire() as conn: - async with conn.cursor() as cur: + try: + #generates a ValueError if invalid UUID is given + UUID(uuid) + + query_res = [] + async with self.pg_client.acquire() as conn: + async with conn.cursor() as cur: await cur.execute( """ SELECT * FROM sessions @@ -177,8 +183,16 @@ async def return_snare_info(self, uuid): ) sess['owners'] = dict(ChainMap(*owner_type)) query_res.append(sess) - cur.close() - conn.close() + cur.close() + conn.close() + except ( + ValueError, + TimeoutError, + psycopg2.ProgrammingError, + psycopg2.OperationalError + ): + query_res = 'Invalid SNARE UUID' + return query_res async def return_session_info(self, sess_uuid, snare_uuid=None): From 31cf92df16618991bb95a80cbd4c428613ff8661 Mon Sep 17 00:00:00 2001 From: mzfr Date: Wed, 10 Jun 2020 15:05:42 +0530 Subject: [PATCH 06/31] Remove redis from API --- tanner/api/api.py | 4 +--- tanner/api/server.py | 7 +++---- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 20f7f5ec..83955388 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -1,7 +1,6 @@ import json import logging import operator -import aioredis import psycopg2 from asyncio import TimeoutError from uuid import UUID @@ -10,9 +9,8 @@ class Api: - def __init__(self, redis_client, pg_client): + def __init__(self, pg_client): self.logger = logging.getLogger('tanner.api.Api') - self.redis_client = redis_client self.pg_client = pg_client async def return_snares(self): diff --git a/tanner/api/server.py b/tanner/api/server.py index df0d0433..fe1bf87d 100644 --- 
a/tanner/api/server.py +++ b/tanner/api/server.py @@ -5,7 +5,7 @@ from aiohttp.web import middleware from tanner.api import api -from tanner import redis_client, postgres_client +from tanner import postgres_client from tanner.config import TannerConfig from tanner.utils.api_key_generator import generate @@ -77,7 +77,7 @@ async def handle_session_info(self, request): return web.json_response(response_msg) async def on_shutdown(self, app): - self.redis_client.close() + self.pg_client.close() @middleware async def auth(self, request, handler): @@ -108,11 +108,10 @@ def create_app(self, loop, auth=False): def start(self): loop = asyncio.get_event_loop() - self.redis_client = loop.run_until_complete(redis_client.RedisClient.get_redis_client(poolsize=20)) self.pg_client = loop.run_until_complete( postgres_client.PostgresClient().get_pg_client() ) - self.api = api.Api(self.redis_client, self.pg_client) + self.api = api.Api(self.pg_client) set_auth = TannerConfig.get('API', 'auth') app = self.create_app(loop, set_auth) host = TannerConfig.get('API', 'host') From 1e3e93c01b856608ff54a8b544e133984f088309 Mon Sep 17 00:00:00 2001 From: mzfr Date: Thu, 11 Jun 2020 20:47:33 +0530 Subject: [PATCH 07/31] update the postges client and create_table function Made changes to use sqlalchemy instead of running raw queries --- tanner/dbutils.py | 121 +++++++++++++++++--------------------- tanner/postgres_client.py | 16 +++-- 2 files changed, 65 insertions(+), 72 deletions(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index c1f07685..c123c10b 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -1,6 +1,9 @@ import asyncio import logging +from sqlalchemy.sql.ddl import CreateTable +from sqlalchemy.dialects.postgresql import UUID, INET, TIMESTAMP, FLOAT +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey import psycopg2 from datetime import datetime from tanner.utils.attack_type import AttackType @@ -15,74 +18,58 @@ async def create_data_tables(pg_client): Arguments: pg_client {aiopg.pool.Pool} """ + meta = MetaData() + sessions = Table( + 'sessions', meta, + Column('id', UUID(as_uuid=True), primary_key=True, unique=True), + Column('sensor_id', UUID(as_uuid=True), primary_key=True, index=True, nullable=False), + Column('ip', INET, nullable=False), + Column('port', Integer, nullable=False), + Column('country', String, nullable=True), + Column('country_code', String, nullable=True), + Column('city', String, nullable=True), + Column('zip_code', Integer, nullable=True), + Column('user_agent', String, nullable=False), + Column('start_time', TIMESTAMP, nullable=False), + Column('end_time', TIMESTAMP, nullable=False), + Column('rps', FLOAT, nullable=False, comment='requests per second'), + Column('atbr', FLOAT, nullable=False, comment='approx_time_between_requests'), + Column('accepted_paths', Integer, nullable=False), + Column('errors', Integer, nullable=False), + Column('hidden_links', Integer, nullable=False), + Column('referer', String, nullable=False), + + ) + + paths = Table( + 'paths', meta, + Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), + Column('path', String, nullable=False), + Column('created_at', TIMESTAMP), + Column('response_status', Integer, nullable=False), + Column('attack_type', Integer, nullable=False) + ) + cookies = Table( + 'cookies', meta, + Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), + Column('key', String), + Column('value', String) + ) + owners = Table( + 'owners', meta, + 
Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), + Column('owner_type', String), + Column('probability', FLOAT) + ) async with pg_client.acquire() as conn: - async with conn.cursor() as cur: - - await cur.execute( - """ - CREATE TABLE IF NOT EXISTS "sessions" ( - "id" UUID PRIMARY KEY, - "sensor_id" UUID NOT NULL, - "ip" INET NOT NULL, - "port" INT NOT NULL, - "country" TEXT NULL, - "country_code" TEXT NULL, - "city" TEXT NULL, - "zip_code" INT NULL, - "user_agent" TEXT NOT NULL, - "start_time" TIMESTAMP DEFAULT NOW(), - "end_time" TIMESTAMP DEFAULT NOW(), - "rps" FLOAT NOT NULL, - "atbr" FLOAT NOT NULL, - "accepted_paths" INT NOT NULL, - "errors" INT NOT NULL, - "hidden_links" INT NOT NULL, - "referer" TEXT NOT NULL - ) - """ - ) - await cur.execute( - """ - CREATE TABLE IF NOT EXISTS "paths" ( - "session_id" UUID REFERENCES sessions(id), - "path" TEXT NOT NULL, - "created_at" TIMESTAMP DEFAULT now(), - "response_status" INT NOT NULL, - "attack_type" INT NOT NULL - ) - """ - ) - - await cur.execute( - """ - CREATE TABLE IF NOT EXISTS "cookies" ( - "session_id" UUID REFERENCES sessions(id), - "key" TEXT NULL, - "value" TEXT NULL - ) - """ - ) - await cur.execute( - """ - CREATE TABLE IF NOT EXISTS "owners" ( - "session_id" UUID REFERENCES sessions(id), - "owner_type" TEXT, - "probability" FLOAT - ) - """ - ) - await cur.execute( - "comment on column sessions.rps is 'requests per second'" - ) - await cur.execute( - "comment on column sessions.atbr is 'approx_time_between_requests'" - ) - await cur.execute("CREATE INDEX ON sessions(sensor_id)") - await cur.execute('CREATE INDEX ON "paths"(session_id)') - await cur.execute('CREATE INDEX ON "cookies"(session_id)') - await cur.execute('CREATE INDEX ON "owners"(session_id)') - cur.close() - conn.close() + await conn.execute('DROP TABLE IF EXISTS cookies') + await conn.execute('DROP TABLE IF EXISTS paths') + await conn.execute('DROP TABLE IF EXISTS owners') + await conn.execute('DROP TABLE IF EXISTS sessions') + await conn.execute(CreateTable(sessions)) + await conn.execute(CreateTable(paths)) + await conn.execute(CreateTable(cookies)) + await conn.execute(CreateTable(owners)) @staticmethod async def add_analyzed_data(session, pg_client): diff --git a/tanner/postgres_client.py b/tanner/postgres_client.py index 0a96cbfe..27177329 100644 --- a/tanner/postgres_client.py +++ b/tanner/postgres_client.py @@ -2,6 +2,8 @@ import logging import aiopg +from aiopg.sa import create_engine +import sqlalchemy as sa import psycopg2 from tanner.config import TannerConfig @@ -24,12 +26,16 @@ def __init__(self): async def get_pg_client(self): pg_client = None try: - dsn = "dbname={} user={} password={} host={} port={}".format( - self.db_name, self.user, self.password, self.host, self.port - ) - pg_client = await asyncio.wait_for( - aiopg.create_pool(dsn, maxsize=self.poolsize), timeout=int(self.timeout) + create_engine( + host=self.host, + port=self.port, + user=self.user, + password=self.password, + database=self.db_name, + maxsize=self.poolsize + ), + timeout=int(self.timeout) ) except ( asyncio.TimeoutError, From c49d7c7f471fe86bf840aa92d370e76ab10f609b Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 13 Jun 2020 11:25:32 +0530 Subject: [PATCH 08/31] Update the analyze function Also make tables accesible for both the functions of a class --- tanner/dbutils.py | 206 +++++++++++++++++++------------------- tanner/postgres_client.py | 2 +- 2 files changed, 102 insertions(+), 106 deletions(-) diff --git a/tanner/dbutils.py 
b/tanner/dbutils.py index c123c10b..782a8d55 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -3,11 +3,60 @@ import logging from sqlalchemy.sql.ddl import CreateTable from sqlalchemy.dialects.postgresql import UUID, INET, TIMESTAMP, FLOAT -from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey, insert, inspect import psycopg2 from datetime import datetime from tanner.utils.attack_type import AttackType +meta = MetaData() +SESSIONS = Table( + "sessions", + meta, + Column("id", UUID(as_uuid=True), primary_key=True, unique=True), + Column( + "sensor_id", UUID(as_uuid=True), primary_key=True, index=True, nullable=False + ), + Column("ip", INET, nullable=False), + Column("port", Integer, nullable=False), + Column("country", String, nullable=True), + Column("country_code", String, nullable=True), + Column("city", String, nullable=True), + Column("zip_code", Integer, nullable=True), + Column("user_agent", String, nullable=False), + Column("start_time", TIMESTAMP, nullable=False), + Column("end_time", TIMESTAMP, nullable=False), + Column("rps", FLOAT, nullable=False, comment="requests per second"), + Column("atbr", FLOAT, nullable=False, comment="approx_time_between_requests"), + Column("accepted_paths", Integer, nullable=False), + Column("errors", Integer, nullable=False), + Column("hidden_links", Integer, nullable=False), + Column("referer", String), +) + +PATHS = Table( + "paths", + meta, + Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True), + Column("path", String, nullable=False), + Column("created_at", TIMESTAMP), + Column("response_status", Integer, nullable=False), + Column("attack_type", Integer, nullable=False), +) +COOKIES = Table( + "cookies", + meta, + Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True), + Column("key", String), + Column("value", String), +) +OWNERS = Table( + "owners", + meta, + Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True), + Column("owner_type", String), + Column("probability", FLOAT), +) + class DBUtils: @staticmethod @@ -16,68 +65,24 @@ async def create_data_tables(pg_client): the postgres database Arguments: - pg_client {aiopg.pool.Pool} + pg_client {aiopg.sa.engine.Engine} """ - meta = MetaData() - sessions = Table( - 'sessions', meta, - Column('id', UUID(as_uuid=True), primary_key=True, unique=True), - Column('sensor_id', UUID(as_uuid=True), primary_key=True, index=True, nullable=False), - Column('ip', INET, nullable=False), - Column('port', Integer, nullable=False), - Column('country', String, nullable=True), - Column('country_code', String, nullable=True), - Column('city', String, nullable=True), - Column('zip_code', Integer, nullable=True), - Column('user_agent', String, nullable=False), - Column('start_time', TIMESTAMP, nullable=False), - Column('end_time', TIMESTAMP, nullable=False), - Column('rps', FLOAT, nullable=False, comment='requests per second'), - Column('atbr', FLOAT, nullable=False, comment='approx_time_between_requests'), - Column('accepted_paths', Integer, nullable=False), - Column('errors', Integer, nullable=False), - Column('hidden_links', Integer, nullable=False), - Column('referer', String, nullable=False), - - ) - - paths = Table( - 'paths', meta, - Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), - Column('path', String, nullable=False), - Column('created_at', TIMESTAMP), - Column('response_status', Integer, 
nullable=False), - Column('attack_type', Integer, nullable=False) - ) - cookies = Table( - 'cookies', meta, - Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), - Column('key', String), - Column('value', String) - ) - owners = Table( - 'owners', meta, - Column('session_id', UUID(as_uuid=True), ForeignKey('sessions.id'), index=True), - Column('owner_type', String), - Column('probability', FLOAT) - ) - async with pg_client.acquire() as conn: - await conn.execute('DROP TABLE IF EXISTS cookies') - await conn.execute('DROP TABLE IF EXISTS paths') - await conn.execute('DROP TABLE IF EXISTS owners') - await conn.execute('DROP TABLE IF EXISTS sessions') - await conn.execute(CreateTable(sessions)) - await conn.execute(CreateTable(paths)) - await conn.execute(CreateTable(cookies)) - await conn.execute(CreateTable(owners)) + Tables = [SESSIONS, PATHS, COOKIES, OWNERS] + async with pg_client.acquire() as conn: + for table in Tables: + try: + await conn.execute(CreateTable(table)) + except psycopg2.errors.DuplicateTable: + continue + @staticmethod async def add_analyzed_data(session, pg_client): """Insert analyzed sessions into postgres Arguments: session {dict} -- dictionary having all the sessions details - pg_client {aiopg.pool.Pool} + pg_client {aiopg.sa.engine.Engine} """ def time_convertor(time): @@ -87,25 +92,7 @@ def time_convertor(time): Arguments: time {str} -- time in epoch format """ - return datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S') - - Cookies = "INSERT INTO cookies(session_id, key, value) VALUES('{uuid}', '{key}', '{value}');" - Sessions = ( - "INSERT INTO sessions (id, sensor_id, ip, port, country," - "country_code, city, zip_code, user_agent, start_time," - "end_time, rps, atbr, accepted_paths, errors, hidden_links, referer) " - "VALUES ('{uuid}','{sensor}','{ip}',{port},'{country}'," - "'{ccode}','{city}',{zcode},'{ua}','{st}','{et}',{rps}," - "{atbr},{apaths},{err},{hlinks},'{referer}');" - ) - Paths = ( - "INSERT INTO paths (session_id, path, created_at, response_status, attack_type) " - "VALUES ('{id}','{path}','{time}',{res},{atype});" - ) - Owners = ( - "INSERT INTO owners (session_id, owner_type, probability) " - "VALUES ('{id}', '{key}', {val});" - ) + return datetime.fromtimestamp(time).strftime("%Y-%m-%d %H:%M:%S") start_time = time_convertor(session["start_time"]) end_time = time_convertor(session["end_time"]) @@ -113,51 +100,60 @@ def time_convertor(time): logger = logging.getLogger(__name__) try: - sessions_query = Sessions.format( - uuid=session["sess_uuid"], - sensor=session["snare_uuid"], + sessions_query = insert(SESSIONS).values( + id=session["sess_uuid"], + sensor_id=session["snare_uuid"], ip=session["peer_ip"], port=session["peer_port"], country=session["location"]["country"], - ccode=session["location"]["country_code"], + country_code=session["location"]["country_code"], city=session["location"]["city"], - zcode=session["location"]["zip_code"], - ua=session["user_agent"], - st=start_time, - et=end_time, + zip_code=session["location"]["zip_code"], + user_agent=session["user_agent"], + start_time=start_time, + end_time=end_time, rps=session["requests_in_second"], atbr=session["approx_time_between_requests"], - apaths=session["accepted_paths"], - err=session["errors"], - hlinks=session["hidden_links"], - referer=session["referer"] + accepted_paths=session["accepted_paths"], + errors=session["errors"], + hidden_links=session["hidden_links"], + referer=session["referer"], ) async with pg_client.acquire() as conn: - async 
with conn.cursor() as cur: - await cur.execute(sessions_query) - for k, v in session["cookies"].items(): - await cur.execute( - Cookies.format(uuid=session["sess_uuid"], key=k, value=v) - ) + await conn.execute(sessions_query) - for path in session["paths"]: - timestamp = time_convertor(path["timestamp"]) - paths_query = Paths.format( - id=session["sess_uuid"], + print("Inserted sessions") + for k, v in session["cookies"].items(): + await conn.execute( + insert(COOKIES).values( + session_id=session["sess_uuid"], key=k, value=v + ) + ) + print("Inserted Cookies") + + for path in session["paths"]: + timestamp = time_convertor(path["timestamp"]) + await conn.execute( + insert(PATHS).values( + session_id=session["sess_uuid"], path=path["path"], - time=timestamp, - res=path["response_status"], - atype=AttackType[path["attack_type"]].value + created_at=timestamp, + response_status=path["response_status"], + attack_type=AttackType[path["attack_type"]].value, ) + ) + print("Inserted Paths") - await cur.execute(paths_query) - - for k, v in session["possible_owners"].items(): - await cur.execute(Owners.format(id=session["sess_uuid"], key=k, val=v)) + for k, v in session["possible_owners"].items(): + await conn.execute( + insert(OWNERS).values( + session_id=session["sess_uuid"], owner_type=k, probability=v + ) + ) - cur.close() - conn.close() + print("Inserted Owners") + await conn.close() except psycopg2.ProgrammingError as pg_error: logger.exception( diff --git a/tanner/postgres_client.py b/tanner/postgres_client.py index 27177329..ab72d0f9 100644 --- a/tanner/postgres_client.py +++ b/tanner/postgres_client.py @@ -33,7 +33,7 @@ async def get_pg_client(self): user=self.user, password=self.password, database=self.db_name, - maxsize=self.poolsize + maxsize=self.poolsize, ), timeout=int(self.timeout) ) From d3b3f4d42c88d2401e70ee3df45882c53bc76271 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 13 Jun 2020 11:30:01 +0530 Subject: [PATCH 09/31] Update API server tests --- tanner/tests/test_api_server.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tanner/tests/test_api_server.py b/tanner/tests/test_api_server.py index bf359011..1e977f7b 100644 --- a/tanner/tests/test_api_server.py +++ b/tanner/tests/test_api_server.py @@ -10,10 +10,10 @@ class TestAPIServer(AioHTTPTestCase): def setUp(self): self.serv = server.ApiServer() - redis = mock.Mock() - redis.close = mock.Mock() - self.serv.redis_client = redis - self.serv.api = api.Api(self.serv.redis_client) + postgres = mock.Mock() + postgres.close = mock.Mock() + self.serv.pg_client = postgres + self.serv.api = api.Api(self.serv.pg_client) super(TestAPIServer, self).setUp() @@ -49,8 +49,8 @@ async def mock_return_snares(): @unittest_run_loop async def test_api_snare_info_request(self): - async def mock_return_snare_info(snare_uuid, count): - if snare_uuid == "8fa6aa98-4283-4085-bfb9-a1cd3a9e56e4" and count == 50: + async def mock_return_snare_info(snare_uuid): + if snare_uuid == "8fa6aa98-4283-4085-bfb9-a1cd3a9e56e4": return [{"test_sess1": "sess1_info"}, {"test_sess1": "sess2_info"}] assert_content = {"version": 1, From 19f6790affdbe3d5bd8adcda42f44d43b3bd3037 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 13 Jun 2020 11:39:04 +0530 Subject: [PATCH 10/31] Fix pycodestyle errors. 
Also added sqlalchemy in requirements --- requirements.txt | 1 + tanner/api/api.py | 271 +++++++++++++++++++++++----------------------- 2 files changed, 136 insertions(+), 136 deletions(-) diff --git a/requirements.txt b/requirements.txt index 226a84b3..f2ede718 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,3 +19,4 @@ tornado mako pyjwt pyyaml +sqlalchemy diff --git a/tanner/api/api.py b/tanner/api/api.py index 83955388..1da8056a 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -10,7 +10,7 @@ class Api: def __init__(self, pg_client): - self.logger = logging.getLogger('tanner.api.Api') + self.logger = logging.getLogger("tanner.api.Api") self.pg_client = pg_client async def return_snares(self): @@ -41,24 +41,26 @@ async def return_snare_stats(self, snare_uuid): [dict] -- Dictionary containing all stats snare. """ result = {} - result['total_sessions'] = 0 - result['total_duration'] = 0 - result['attack_frequency'] = {'sqli': 0, - 'lfi': 0, - 'xss': 0, - 'rfi': 0, - 'cmd_exec': 0} + result["total_sessions"] = 0 + result["total_duration"] = 0 + result["attack_frequency"] = { + "sqli": 0, + "lfi": 0, + "xss": 0, + "rfi": 0, + "cmd_exec": 0, + } sessions = await self.return_snare_info(snare_uuid) - if sessions == 'Invalid SNARE UUID': + if sessions == "Invalid SNARE UUID": return result - result['total_sessions'] = len(sessions) + result["total_sessions"] = len(sessions) for sess in sessions: - result['total_duration'] += sess['end_time'] - sess['start_time'] - for path in sess['paths']: - if path['attack_type'] in result['attack_frequency']: - result['attack_frequency'][path['attack_type']] += 1 + result["total_duration"] += sess["end_time"] - sess["start_time"] + for path in sess["paths"]: + if path["attack_type"] in result["attack_frequency"]: + result["attack_frequency"][path["attack_type"]] += 1 return result @@ -70,127 +72,123 @@ async def return_snare_info(self, uuid): uuid [string] - Snare UUID """ try: - #generates a ValueError if invalid UUID is given + # generates a ValueError if invalid UUID is given UUID(uuid) - + query_res = [] async with self.pg_client.acquire() as conn: - async with conn.cursor() as cur: - await cur.execute( - """ + async with conn.cursor() as cur: + await cur.execute( + """ SELECT * FROM sessions WHERE sessions.sensor_id = '%s' - """ % (uuid) - ) - while True: - session = await cur.fetchmany(size=200) - - if not session: - break - - for r in session: - sess = { - 'sess_uuid': str(r[0]), - 'snare_uuid': str(r[1]), - 'ip': r[2], - 'port': r[3], - 'location': { - 'country': r[4], - 'country_code': r[5], - 'city': r[6], - 'zip_code': r[7], - }, - 'user_agent': r[8], - 'start_time': r[9].timestamp(), - 'end_time': r[10].timestamp(), - 'request_per_second': r[11], - 'approx_time_between_requests': r[12], - 'accepted_paths': r[13], - 'errors': r[14], - 'hidden_links': r[15], - 'referrer': r[16] - } - - #Extracting all cookies - await cur.execute( - """ + """ + % (uuid) + ) + while True: + session = await cur.fetchmany(size=200) + + if not session: + break + + for r in session: + sess = { + "sess_uuid": str(r[0]), + "snare_uuid": str(r[1]), + "ip": r[2], + "port": r[3], + "location": { + "country": r[4], + "country_code": r[5], + "city": r[6], + "zip_code": r[7], + }, + "user_agent": r[8], + "start_time": r[9].timestamp(), + "end_time": r[10].timestamp(), + "request_per_second": r[11], + "approx_time_between_requests": r[12], + "accepted_paths": r[13], + "errors": r[14], + "hidden_links": r[15], + "referrer": r[16], + } + + # Extracting all 
cookies + await cur.execute( + """ SELECT * FROM cookies WHERE cookies.session_id = '%s' - """ % (str(r[0])) - ) - - while True: - cookies = await cur.fetchmany(size=200) - - if not cookies: - break - - all_cookies = [] - for r in cookies: - all_cookies.append( - { - r[1]: r[2] - } - ) - sess['cookies'] = dict(ChainMap(*all_cookies)) - - #Extracting all paths - await cur.execute( """ + % (str(r[0])) + ) + + while True: + cookies = await cur.fetchmany(size=200) + + if not cookies: + break + + all_cookies = [] + for r in cookies: + all_cookies.append({r[1]: r[2]}) + sess["cookies"] = dict(ChainMap(*all_cookies)) + + # Extracting all paths + await cur.execute( + """ SELECT * FROM paths WHERE paths.session_id = '%s' - """ % (str(r[0])) - ) - - while True: - paths = await cur.fetchmany(size=200) - - if not paths: - break - all_paths = [] - - for p in paths: - all_paths.append( - { - "path": p[1], - "timestamp": p[2].timestamp(), - "response_status": p[3], - "attack_type": AttackType(p[4]).name - } - ) - sess['paths'] = all_paths - - # Extracting all owners - await cur.execute( """ + % (str(r[0])) + ) + + while True: + paths = await cur.fetchmany(size=200) + + if not paths: + break + all_paths = [] + + for p in paths: + all_paths.append( + { + "path": p[1], + "timestamp": p[2].timestamp(), + "response_status": p[3], + "attack_type": AttackType(p[4]).name, + } + ) + sess["paths"] = all_paths + + # Extracting all owners + await cur.execute( + """ SELECT * FROM owners WHERE owners.session_id = '%s' - """ % (str(r[0])) - ) - - while True: - owners = await cur.fetchmany(size=200) - - if not owners: - break - owner_type = [] - - for o in owners: - owner_type.append( - { - o[1]: o[2] - } - ) - sess['owners'] = dict(ChainMap(*owner_type)) - query_res.append(sess) - cur.close() + """ + % (str(r[0])) + ) + + while True: + owners = await cur.fetchmany(size=200) + + if not owners: + break + owner_type = [] + + for o in owners: + owner_type.append({o[1]: o[2]}) + sess["owners"] = dict(ChainMap(*owner_type)) + query_res.append(sess) + cur.close() conn.close() except ( ValueError, TimeoutError, psycopg2.ProgrammingError, - psycopg2.OperationalError + psycopg2.OperationalError, ): - query_res = 'Invalid SNARE UUID' - + query_res = "Invalid SNARE UUID" + return query_res async def return_session_info(self, sess_uuid, snare_uuid=None): @@ -201,10 +199,10 @@ async def return_session_info(self, sess_uuid, snare_uuid=None): for snare_id in snare_uuids: sessions = await self.return_snare_info(snare_id) - if sessions == 'Invalid SNARE UUID': + if sessions == "Invalid SNARE UUID": continue for sess in sessions: - if sess['sess_uuid'] == sess_uuid: + if sess["sess_uuid"] == sess_uuid: return sess async def return_sessions(self, filters): @@ -213,8 +211,8 @@ async def return_sessions(self, filters): matching_sessions = [] for snare_id in snare_uuids: result = await self.return_snare_info(snare_id) - if result == 'Invalid SNARE UUID': - return 'Invalid filter : SNARE UUID' + if result == "Invalid SNARE UUID": + return "Invalid filter : SNARE UUID" sessions = result for sess in sessions: match_count = 0 @@ -223,7 +221,7 @@ async def return_sessions(self, filters): if self.apply_filter(filter_name, filter_value, sess): match_count += 1 except KeyError: - return 'Invalid filter : %s' % filter_name + return "Invalid filter : %s" % filter_name if match_count == len(filters): matching_sessions.append(sess) @@ -235,26 +233,27 @@ async def return_latest_session(self): snares = await self.return_snares() try: for snare in snares: 
- filters = {'snare_uuid': snare} + filters = {"snare_uuid": snare} sessions = await self.return_sessions(filters) for session in sessions: - if latest_time < session['end_time']: - latest_time = session['end_time'] - latest_session = session['sess_uuid'] + if latest_time < session["end_time"]: + latest_time = session["end_time"] + latest_session = session["sess_uuid"] except TypeError: return None return latest_session def apply_filter(self, filter_name, filter_value, sess): - available_filters = {'user_agent': operator.contains, - 'peer_ip': operator.eq, - 'attack_types': operator.contains, - 'possible_owners': operator.contains, - 'start_time': operator.le, - 'end_time': operator.ge, - 'snare_uuid': operator.eq, - 'location': operator.contains - } + available_filters = { + "user_agent": operator.contains, + "peer_ip": operator.eq, + "attack_types": operator.contains, + "possible_owners": operator.contains, + "start_time": operator.le, + "end_time": operator.ge, + "snare_uuid": operator.eq, + "location": operator.contains, + } try: if available_filters[filter_name] is operator.contains: From daec0ce27796b3bad58565c50e65435e71a92b9b Mon Sep 17 00:00:00 2001 From: mzfr Date: Mon, 15 Jun 2020 16:02:39 +0530 Subject: [PATCH 11/31] change the insertion method Instead of using insert() function separately we used Tables functionality to insert --- tanner/dbutils.py | 56 +++++++++++++++++++++-------------------------- 1 file changed, 25 insertions(+), 31 deletions(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index 782a8d55..74c04124 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -100,49 +100,43 @@ def time_convertor(time): logger = logging.getLogger(__name__) try: - sessions_query = insert(SESSIONS).values( - id=session["sess_uuid"], - sensor_id=session["snare_uuid"], - ip=session["peer_ip"], - port=session["peer_port"], - country=session["location"]["country"], - country_code=session["location"]["country_code"], - city=session["location"]["city"], - zip_code=session["location"]["zip_code"], - user_agent=session["user_agent"], - start_time=start_time, - end_time=end_time, - rps=session["requests_in_second"], - atbr=session["approx_time_between_requests"], - accepted_paths=session["accepted_paths"], - errors=session["errors"], - hidden_links=session["hidden_links"], - referer=session["referer"], - ) - async with pg_client.acquire() as conn: - await conn.execute(sessions_query) + await conn.execute( + SESSIONS.insert(), + id=session["sess_uuid"], sensor_id=session["snare_uuid"], + ip=session["peer_ip"], port=session["peer_port"], + country=session["location"]["country"], + country_code=session["location"]["country_code"], + city=session["location"]["city"], + zip_code=session["location"]["zip_code"], + user_agent=session["user_agent"], + start_time=start_time, end_time=end_time, + rps=session["requests_in_second"], + atbr=session["approx_time_between_requests"], + accepted_paths=session["accepted_paths"], + errors=session["errors"], + hidden_links=session["hidden_links"], + referer=session["referer"], + ) print("Inserted sessions") for k, v in session["cookies"].items(): await conn.execute( - insert(COOKIES).values( - session_id=session["sess_uuid"], key=k, value=v - ) + COOKIES.insert(), + session_id=session["sess_uuid"], key=k, value=v ) print("Inserted Cookies") for path in session["paths"]: timestamp = time_convertor(path["timestamp"]) await conn.execute( - insert(PATHS).values( - session_id=session["sess_uuid"], - path=path["path"], - created_at=timestamp, - 
response_status=path["response_status"], - attack_type=AttackType[path["attack_type"]].value, + PATHS.insert(), + session_id=session["sess_uuid"], + path=path["path"], created_at=timestamp, + response_status=path["response_status"], + attack_type=AttackType[path["attack_type"]].value, ) - ) + print("Inserted Paths") for k, v in session["possible_owners"].items(): From 2ba74507fdd2eaee6a1c6dfb6e5e5c493f34e451 Mon Sep 17 00:00:00 2001 From: mzfr Date: Mon, 15 Jun 2020 16:03:04 +0530 Subject: [PATCH 12/31] Change API to make use of sqlalchemy --- tanner/api/api.py | 193 ++++++++++++++++--------------------------- tanner/api/server.py | 4 +- 2 files changed, 72 insertions(+), 125 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 1da8056a..9227dc8a 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -1,14 +1,26 @@ -import json +from json import dumps, loads import logging import operator import psycopg2 +import datetime from asyncio import TimeoutError from uuid import UUID +from sqlalchemy import select +# from sqlalchemy.sql.expression import Select from collections import ChainMap from tanner.utils.attack_type import AttackType +from tanner.dbutils import SESSIONS, PATHS, COOKIES, OWNERS +def alchemyencoder(obj): + """JSON encoder function for SQLAlchemy special classes.""" + if isinstance(obj, datetime.date): + return obj.isoformat() + elif isinstance(obj, UUID): + return str(obj) + class Api: + def __init__(self, pg_client): self.logger = logging.getLogger("tanner.api.Api") self.pg_client = pg_client @@ -21,14 +33,14 @@ async def return_snares(self): [list] -- List containing UUID of all snares """ query_res = [] + async with self.pg_client.acquire() as conn: - async with conn.cursor() as cur: - await cur.execute("SELECT DISTINCT sensor_id FROM sessions") - ans = await cur.fetchall() - for r in ans: - query_res.append(str(r[0])) - cur.close() - conn.close() + stmt = select([SESSIONS.c.sensor_id], distinct=True) + rows = await (await conn.execute(stmt)).fetchall() + for r in rows: + query_res.append(str(r[0])) + + return query_res async def return_snare_stats(self, snare_uuid): @@ -50,17 +62,25 @@ async def return_snare_stats(self, snare_uuid): "rfi": 0, "cmd_exec": 0, } + async with self.pg_client.acquire() as conn: + stmt = select([PATHS.c.attack_type]) + rows = await (await conn.execute(stmt)).fetchall() + result["total_sessions"] = len(rows) + for r in rows: + attack_type = AttackType(r[0]).name + if attack_type in result["attack_frequency"]: + result["attack_frequency"][attack_type] += 1 + + time_stmt = select( + [SESSIONS.c.start_time, SESSIONS.c.end_time] + ).where(SESSIONS.c.sensor_id == snare_uuid) - sessions = await self.return_snare_info(snare_uuid) - if sessions == "Invalid SNARE UUID": - return result + times = await (await conn.execute(time_stmt)).fetchall() - result["total_sessions"] = len(sessions) - for sess in sessions: - result["total_duration"] += sess["end_time"] - sess["start_time"] - for path in sess["paths"]: - if path["attack_type"] in result["attack_frequency"]: - result["attack_frequency"][path["attack_type"]] += 1 + for t in times: + start = t[0].timestamp() + end = t[1].timestamp() + result["total_duration"] += end - start return result @@ -77,110 +97,37 @@ async def return_snare_info(self, uuid): query_res = [] async with self.pg_client.acquire() as conn: - async with conn.cursor() as cur: - await cur.execute( - """ - SELECT * FROM sessions - WHERE - sessions.sensor_id = '%s' - """ - % (uuid) - ) - while True: - session = await 
cur.fetchmany(size=200) - - if not session: - break - - for r in session: - sess = { - "sess_uuid": str(r[0]), - "snare_uuid": str(r[1]), - "ip": r[2], - "port": r[3], - "location": { - "country": r[4], - "country_code": r[5], - "city": r[6], - "zip_code": r[7], - }, - "user_agent": r[8], - "start_time": r[9].timestamp(), - "end_time": r[10].timestamp(), - "request_per_second": r[11], - "approx_time_between_requests": r[12], - "accepted_paths": r[13], - "errors": r[14], - "hidden_links": r[15], - "referrer": r[16], - } - - # Extracting all cookies - await cur.execute( - """ - SELECT * FROM cookies WHERE cookies.session_id = '%s' - """ - % (str(r[0])) - ) - - while True: - cookies = await cur.fetchmany(size=200) - - if not cookies: - break - - all_cookies = [] - for r in cookies: - all_cookies.append({r[1]: r[2]}) - sess["cookies"] = dict(ChainMap(*all_cookies)) - - # Extracting all paths - await cur.execute( - """ - SELECT * FROM paths WHERE paths.session_id = '%s' - """ - % (str(r[0])) - ) - - while True: - paths = await cur.fetchmany(size=200) - - if not paths: - break - all_paths = [] - - for p in paths: - all_paths.append( - { - "path": p[1], - "timestamp": p[2].timestamp(), - "response_status": p[3], - "attack_type": AttackType(p[4]).name, - } - ) - sess["paths"] = all_paths - - # Extracting all owners - await cur.execute( - """ - SELECT * FROM owners WHERE owners.session_id = '%s' - """ - % (str(r[0])) - ) - - while True: - owners = await cur.fetchmany(size=200) - - if not owners: - break - owner_type = [] - - for o in owners: - owner_type.append({o[1]: o[2]}) - sess["owners"] = dict(ChainMap(*owner_type)) - query_res.append(sess) - cur.close() - conn.close() + stmt = select([SESSIONS]).where(SESSIONS.c.sensor_id == uuid) + query = await (await conn.execute(stmt)).fetchall() + + for row in query: + session = loads(dumps(dict(row), default=alchemyencoder)) + + cookies_query = select([COOKIES]).where(COOKIES.c.session_id == session.get("id")) + cookies = await (await conn.execute(cookies_query)).fetchall() + + all_cookies = [] + for r in cookies: + all_cookies.append({r[1]: r[2]}) + session["cookies"] = dict(ChainMap(*all_cookies)) + + paths_query = select([PATHS]).where(PATHS.c.session_id == session.get("id")) + paths = await (await conn.execute(paths_query)).fetchall() + + all_paths = [] + for p in paths: + all_paths.append(dumps(dict(p), default=alchemyencoder)) + session["paths"] = all_cookies + + owners_query = select([OWNERS]).where(OWNERS.c.session_id == session.get("id")) + owners = await (await conn.execute(owners_query)).fetchall() + + owner_type = [] + for o in owners: + owner_type.append({o[1]: o[2]}) + session["owners"] = dict(ChainMap(*owner_type)) + + query_res.append(session) except ( ValueError, TimeoutError, @@ -202,7 +149,7 @@ async def return_session_info(self, sess_uuid, snare_uuid=None): if sessions == "Invalid SNARE UUID": continue for sess in sessions: - if sess["sess_uuid"] == sess_uuid: + if sess["id"] == sess_uuid: return sess async def return_sessions(self, filters): @@ -251,7 +198,7 @@ def apply_filter(self, filter_name, filter_value, sess): "possible_owners": operator.contains, "start_time": operator.le, "end_time": operator.ge, - "snare_uuid": operator.eq, + "sensor_id": operator.eq, "location": operator.contains, } diff --git a/tanner/api/server.py b/tanner/api/server.py index fe1bf87d..28458f5c 100644 --- a/tanner/api/server.py +++ b/tanner/api/server.py @@ -51,7 +51,7 @@ async def handle_snare_stats(self, request): async def handle_sessions(self, 
request): snare_uuid = request.match_info['snare_uuid'] params = request.url.query - applied_filters = {'snare_uuid': snare_uuid} + applied_filters = {'sensor_id': snare_uuid} try: if 'filters' in params: for filt in params['filters'].split(): @@ -65,7 +65,7 @@ async def handle_sessions(self, request): result = 'Invalid filter definition' else: sessions = await self.api.return_sessions(applied_filters) - sess_uuids = [sess['sess_uuid'] for sess in sessions] + sess_uuids = [sess['id'] for sess in sessions] result = sess_uuids response_msg = self._make_response(result) return web.json_response(response_msg) From 9d0a86fb9e76fdfee68b77c0de1019b75481c23f Mon Sep 17 00:00:00 2001 From: mzfr Date: Mon, 15 Jun 2020 16:11:22 +0530 Subject: [PATCH 13/31] Fix PEP8 and sort imports --- tanner/api/api.py | 48 ++++++++++++++++++++++++++--------------------- tanner/dbutils.py | 18 ++++++++++-------- 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 9227dc8a..1d621138 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -1,15 +1,16 @@ -from json import dumps, loads +import datetime import logging import operator -import psycopg2 -import datetime from asyncio import TimeoutError +from collections import ChainMap +from json import dumps, loads from uuid import UUID + +import psycopg2 from sqlalchemy import select -# from sqlalchemy.sql.expression import Select -from collections import ChainMap + +from tanner.dbutils import COOKIES, OWNERS, PATHS, SESSIONS from tanner.utils.attack_type import AttackType -from tanner.dbutils import SESSIONS, PATHS, COOKIES, OWNERS def alchemyencoder(obj): @@ -19,8 +20,8 @@ def alchemyencoder(obj): elif isinstance(obj, UUID): return str(obj) + class Api: - def __init__(self, pg_client): self.logger = logging.getLogger("tanner.api.Api") self.pg_client = pg_client @@ -33,14 +34,13 @@ async def return_snares(self): [list] -- List containing UUID of all snares """ query_res = [] - + async with self.pg_client.acquire() as conn: stmt = select([SESSIONS.c.sensor_id], distinct=True) rows = await (await conn.execute(stmt)).fetchall() for r in rows: query_res.append(str(r[0])) - - + return query_res async def return_snare_stats(self, snare_uuid): @@ -69,11 +69,11 @@ async def return_snare_stats(self, snare_uuid): for r in rows: attack_type = AttackType(r[0]).name if attack_type in result["attack_frequency"]: - result["attack_frequency"][attack_type] += 1 + result["attack_frequency"][attack_type] += 1 - time_stmt = select( - [SESSIONS.c.start_time, SESSIONS.c.end_time] - ).where(SESSIONS.c.sensor_id == snare_uuid) + time_stmt = select([SESSIONS.c.start_time, SESSIONS.c.end_time]).where( + SESSIONS.c.sensor_id == snare_uuid + ) times = await (await conn.execute(time_stmt)).fetchall() @@ -99,27 +99,33 @@ async def return_snare_info(self, uuid): async with self.pg_client.acquire() as conn: stmt = select([SESSIONS]).where(SESSIONS.c.sensor_id == uuid) query = await (await conn.execute(stmt)).fetchall() - + for row in query: session = loads(dumps(dict(row), default=alchemyencoder)) - - cookies_query = select([COOKIES]).where(COOKIES.c.session_id == session.get("id")) + + cookies_query = select([COOKIES]).where( + COOKIES.c.session_id == session.get("id") + ) cookies = await (await conn.execute(cookies_query)).fetchall() - + all_cookies = [] for r in cookies: all_cookies.append({r[1]: r[2]}) session["cookies"] = dict(ChainMap(*all_cookies)) - paths_query = select([PATHS]).where(PATHS.c.session_id == session.get("id")) + 
paths_query = select([PATHS]).where( + PATHS.c.session_id == session.get("id") + ) paths = await (await conn.execute(paths_query)).fetchall() - + all_paths = [] for p in paths: all_paths.append(dumps(dict(p), default=alchemyencoder)) session["paths"] = all_cookies - owners_query = select([OWNERS]).where(OWNERS.c.session_id == session.get("id")) + owners_query = select([OWNERS]).where( + OWNERS.c.session_id == session.get("id") + ) owners = await (await conn.execute(owners_query)).fetchall() owner_type = [] diff --git a/tanner/dbutils.py b/tanner/dbutils.py index 74c04124..f6a72502 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -1,11 +1,13 @@ import asyncio - import logging -from sqlalchemy.sql.ddl import CreateTable -from sqlalchemy.dialects.postgresql import UUID, INET, TIMESTAMP, FLOAT -from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey, insert, inspect -import psycopg2 from datetime import datetime + +import psycopg2 +from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String, Table, + insert, inspect) +from sqlalchemy.dialects.postgresql import FLOAT, INET, TIMESTAMP, UUID +from sqlalchemy.sql.ddl import CreateTable + from tanner.utils.attack_type import AttackType meta = MetaData() @@ -75,7 +77,7 @@ async def create_data_tables(pg_client): await conn.execute(CreateTable(table)) except psycopg2.errors.DuplicateTable: continue - + @staticmethod async def add_analyzed_data(session, pg_client): """Insert analyzed sessions into postgres @@ -102,7 +104,7 @@ def time_convertor(time): try: async with pg_client.acquire() as conn: await conn.execute( - SESSIONS.insert(), + SESSIONS.insert(), id=session["sess_uuid"], sensor_id=session["snare_uuid"], ip=session["peer_ip"], port=session["peer_port"], country=session["location"]["country"], @@ -130,7 +132,7 @@ def time_convertor(time): for path in session["paths"]: timestamp = time_convertor(path["timestamp"]) await conn.execute( - PATHS.insert(), + PATHS.insert(), session_id=session["sess_uuid"], path=path["path"], created_at=timestamp, response_status=path["response_status"], From a81b9702150acbf5f7ac982a1f59fe21b541734f Mon Sep 17 00:00:00 2001 From: mzfr Date: Fri, 19 Jun 2020 13:18:47 +0530 Subject: [PATCH 14/31] Move alchemyencoder in different class --- tanner/api/api.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 1d621138..8191e766 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -3,7 +3,7 @@ import operator from asyncio import TimeoutError from collections import ChainMap -from json import dumps, loads +from json import dumps, loads, JSONEncoder from uuid import UUID import psycopg2 @@ -13,12 +13,14 @@ from tanner.utils.attack_type import AttackType -def alchemyencoder(obj): - """JSON encoder function for SQLAlchemy special classes.""" - if isinstance(obj, datetime.date): - return obj.isoformat() - elif isinstance(obj, UUID): - return str(obj) +class AlchemyEncoder(JSONEncoder): + def default(self, obj): + """JSON encoder function for SQLAlchemy special classes. + """ + if isinstance(obj, datetime.date): + return obj.isoformat() + elif isinstance(obj, UUID): + return str(obj) class Api: From 92b68c04373096a9f1593b0f9d4c73bee1c92d60 Mon Sep 17 00:00:00 2001 From: mzfr Date: Fri, 19 Jun 2020 13:21:57 +0530 Subject: [PATCH 15/31] Update the return_session function. Also modified the apply_filter function. 
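The new apply_filters builds its WHERE clause by %-interpolating values into a string. For comparison, the same lookup can be composed with the SQLAlchemy expression language the rest of this series already uses, so values travel as bound parameters instead of being spliced into the query. The sketch below is illustrative only: build_filter_stmt is a hypothetical helper name, not code this patch adds, and SESSIONS/PATHS/OWNERS come from tanner/dbutils.py:

    from sqlalchemy import select

    from tanner.dbutils import OWNERS, PATHS, SESSIONS


    def build_filter_stmt(filters):
        # Always scope to the requested snare, then AND on optional filters.
        stmt = select([SESSIONS.c.id]).where(SESSIONS.c.sensor_id == filters["sensor_id"])
        if "attack_type" in filters:
            stmt = stmt.select_from(
                SESSIONS.join(PATHS, PATHS.c.session_id == SESSIONS.c.id)
            ).where(PATHS.c.attack_type == filters["attack_type"])
        elif "owners" in filters:
            stmt = stmt.select_from(
                SESSIONS.join(OWNERS, OWNERS.c.session_id == SESSIONS.c.id)
            ).where(OWNERS.c.owner_type == filters["owners"])
        if "peer_ip" in filters:
            stmt = stmt.where(SESSIONS.c.ip == filters["peer_ip"])
        if "user_agent" in filters:
            stmt = stmt.where(SESSIONS.c.user_agent == filters["user_agent"])
        return stmt

Such a statement runs through the same await conn.execute(stmt) call used elsewhere in api.py.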
---
 tanner/api/api.py | 96 ++++++++++++++++++++++++-----------------------
 1 file changed, 50 insertions(+), 46 deletions(-)

diff --git a/tanner/api/api.py b/tanner/api/api.py
index 8191e766..553eaa52 100644
--- a/tanner/api/api.py
+++ b/tanner/api/api.py
@@ -151,36 +151,27 @@ async def return_session_info(self, sess_uuid, snare_uuid=None):
             snare_uuids = [snare_uuid]
         else:
             snare_uuids = await self.return_snares()
+    async def return_sessions(self, filters):
+        """Returns the list of all the sessions.
+        Uses apply_filters function in this class
+        to make the query accordingly.
-        for snare_id in snare_uuids:
-            sessions = await self.return_snare_info(snare_id)
-            if sessions == "Invalid SNARE UUID":
-                continue
-            for sess in sessions:
-                if sess["id"] == sess_uuid:
-                    return sess
+        Args:
+            filters (dict): all the filters that is to be applied
+
+        Returns:
+            [list]: list of sessions
+        """
+        results = []
+        stmt = self.apply_filters(filters)
+        async with self.pg_client.acquire() as conn:
+            query = await (await conn.execute(stmt)).fetchall()
+
+            for row in query:
+                results.append(str(row[0]))
+
+        return list(set(results))
 
-    async def return_sessions(self, filters):
-        snare_uuids = await self.return_snares()
-
-        matching_sessions = []
-        for snare_id in snare_uuids:
-            result = await self.return_snare_info(snare_id)
-            if result == "Invalid SNARE UUID":
-                return "Invalid filter : SNARE UUID"
-            sessions = result
-            for sess in sessions:
-                match_count = 0
-                for filter_name, filter_value in filters.items():
-                    try:
-                        if self.apply_filter(filter_name, filter_value, sess):
-                            match_count += 1
-                    except KeyError:
-                        return "Invalid filter : %s" % filter_name
-
-                if match_count == len(filters):
-                    matching_sessions.append(sess)
-        return matching_sessions
+
     async def return_latest_session(self):
         latest_time = -1
@@ -198,22 +189,35 @@ async def return_latest_session(self):
             return None
         return latest_session
 
-    def apply_filter(self, filter_name, filter_value, sess):
-        available_filters = {
-            "user_agent": operator.contains,
-            "peer_ip": operator.eq,
-            "attack_types": operator.contains,
-            "possible_owners": operator.contains,
-            "start_time": operator.le,
-            "end_time": operator.ge,
-            "sensor_id": operator.eq,
-            "location": operator.contains,
-        }
+    def apply_filters(self, filters):
+        """Makes a SQL query according to the given filters
 
-        try:
-            if available_filters[filter_name] is operator.contains:
-                return available_filters[filter_name](sess[filter_name], filter_value)
-            else:
-                return available_filters[filter_name](filter_value, sess[filter_name])
-        except KeyError:
-            raise
+        Args:
+            filters (dict): all the filters that is to be applied
+
+        Returns:
+            [str]: A SQL query in string format
+        """
+        tables = "sessions S"
+        columns = "S.id"
+        where = "S.sensor_id='%s'"%(filters["sensor_id"])
+
+        if "attack_type" in filters:
+            tables += ", paths P"
+            columns += ", P.session_id"
+            where += " AND P.attack_type=%s"%(filters["attack_type"])
+        elif "owners" in filters:
+            tables += ", owners O"
+            columns += ", O.session_id"
+            where += " AND O.owner_type='%s'"%(filters["owners"])
+        elif "start_time" in filters:
+            where += " AND S.start_time=%s"%(filters["start_time"])
+        elif "end_time" in filters:
+            where += " AND S.end_time=%s"%(filters["end_time"])
+        elif "peer_ip" in filters:
+            where += " AND S.ip='%s'"%(filters["peer_ip"])
+        elif "user_agent" in filters:
+            where += " AND S.user_agent='%s'"%(filters["user_agent"])
+
+        stmt = "SELECT %s FROM %s WHERE %s"%(columns, tables, where)
+        return stmt

From d6f041b785ac0b1e0098fd08d4c4cfa54524a707 Mon
From d6f041b785ac0b1e0098fd08d4c4cfa54524a707 Mon Sep 17 00:00:00 2001 From: mzfr Date: Fri, 19 Jun 2020 13:22:57 +0530 Subject: [PATCH 16/31] Add LIMIT and OFFSET support while returning all the sessions Default for LIMIT is 1000 and OFFSET is 0 --- tanner/api/api.py | 123 ++++++++++++++++++++++++++-------------------- 1 file changed, 70 insertions(+), 53 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 553eaa52..99282ee0 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -7,7 +7,7 @@ from uuid import UUID import psycopg2 -from sqlalchemy import select +from sqlalchemy import select, func from tanner.dbutils import COOKIES, OWNERS, PATHS, SESSIONS from tanner.utils.attack_type import AttackType @@ -65,30 +65,77 @@ async def return_snare_stats(self, snare_uuid): "cmd_exec": 0, } async with self.pg_client.acquire() as conn: - stmt = select([PATHS.c.attack_type]) + stmt = select( + [PATHS.c.attack_type, func.count(PATHS.c.attack_type)] + ).group_by(PATHS.c.attack_type) rows = await (await conn.execute(stmt)).fetchall() - result["total_sessions"] = len(rows) + for r in rows: + result["total_sessions"] += r[1] attack_type = AttackType(r[0]).name if attack_type in result["attack_frequency"]: - result["attack_frequency"][attack_type] += 1 + result["attack_frequency"][attack_type] = r[1] - time_stmt = select([SESSIONS.c.start_time, SESSIONS.c.end_time]).where( - SESSIONS.c.sensor_id == snare_uuid - ) + time_stmt = select( + [func.sum(SESSIONS.c.start_time - SESSIONS.c.end_time)] + ).where(SESSIONS.c.sensor_id == snare_uuid) times = await (await conn.execute(time_stmt)).fetchall() - - for t in times: - start = t[0].timestamp() - end = t[1].timestamp() - result["total_duration"] += end - start + result["total_duration"] = str(times[0][0]) return result - async def return_snare_info(self, uuid): + async def return_session_info(self, sess_uuid): + """This function returns information about a single session. + + Args: + sess_uuid (str): UUID of the session for which + the information has to be returned + + Returns: + [dict]: Dictionary having information about the session. + """ + try: + UUID(sess_uuid) + except ValueError: + return {"Invalid SESSOIN UUID"} + + async with self.pg_client.acquire() as conn: + stmt = select([SESSIONS]).where(SESSIONS.c.id == sess_uuid) + query = await (await conn.execute(stmt)).fetchone() + session = loads(dumps(dict(query), cls=AlchemyEncoder)) + + cookies_query = select([COOKIES]).where(COOKIES.c.session_id == sess_uuid) + cookies = await (await conn.execute(cookies_query)).fetchall() + + all_cookies = [] + for r in cookies: + all_cookies.append({r[1]: r[2]}) + session["cookies"] = dict(ChainMap(*all_cookies)) + + paths_query = select([PATHS]).where(PATHS.c.session_id == session.get("id")) + paths = await (await conn.execute(paths_query)).fetchall() + + all_paths = [] + for p in paths: + all_paths.append(dumps(dict(p), cls=AlchemyEncoder)) + session["paths"] = all_cookies + + owners_query = select([OWNERS]).where( + OWNERS.c.session_id == session.get("id") + ) + owners = await (await conn.execute(owners_query)).fetchall() + + owner_type = [] + for o in owners: + owner_type.append({o[1]: o[2]}) + session["owners"] = dict(ChainMap(*owner_type)) + + return session + + async def return_snare_info(self, uuid, count, offset): """Returns JSON data that contains information about - all the sessions a single snare instance have.
Arguments: uuid [string] - Snare UUID @@ -99,43 +146,18 @@ async def return_snare_info(self, uuid): query_res = [] async with self.pg_client.acquire() as conn: - stmt = select([SESSIONS]).where(SESSIONS.c.sensor_id == uuid) + stmt = ( + select([SESSIONS]) + .where(SESSIONS.c.sensor_id == uuid) + .offset(offset) + .limit(count) + ) query = await (await conn.execute(stmt)).fetchall() for row in query: - session = loads(dumps(dict(row), default=alchemyencoder)) - - cookies_query = select([COOKIES]).where( - COOKIES.c.session_id == session.get("id") - ) - cookies = await (await conn.execute(cookies_query)).fetchall() - - all_cookies = [] - for r in cookies: - all_cookies.append({r[1]: r[2]}) - session["cookies"] = dict(ChainMap(*all_cookies)) - - paths_query = select([PATHS]).where( - PATHS.c.session_id == session.get("id") - ) - paths = await (await conn.execute(paths_query)).fetchall() - - all_paths = [] - for p in paths: - all_paths.append(dumps(dict(p), default=alchemyencoder)) - session["paths"] = all_cookies - - owners_query = select([OWNERS]).where( - OWNERS.c.session_id == session.get("id") - ) - owners = await (await conn.execute(owners_query)).fetchall() - - owner_type = [] - for o in owners: - owner_type.append({o[1]: o[2]}) - session["owners"] = dict(ChainMap(*owner_type)) - - query_res.append(session) + session = loads(dumps(dict(row), cls=AlchemyEncoder)) + session_info = await self.return_session_info(session.get("id")) + query_res.append(session_info) except ( ValueError, TimeoutError, @@ -146,11 +168,6 @@ async def return_snare_info(self, uuid): return query_res - async def return_session_info(self, sess_uuid, snare_uuid=None): - if snare_uuid: - snare_uuids = [snare_uuid] - else: - snare_uuids = await self.return_snares() async def return_sessions(self, filters): """Returns the list of all the sessions. Uses the apply_filters function of this class From 0f7d9143dfbd75fd360488f74eac2093f7c79f32 Mon Sep 17 00:00:00 2001 From: mzfr Date: Fri, 19 Jun 2020 13:23:13 +0530 Subject: [PATCH 17/31] Update the server calls accordingly --- tanner/api/server.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/tanner/api/server.py b/tanner/api/server.py index 28458f5c..2a024cb4 100644 --- a/tanner/api/server.py +++ b/tanner/api/server.py @@ -38,7 +38,15 @@ async def handle_snares(self, request): async def handle_snare_info(self, request): snare_uuid = request.match_info['snare_uuid'] - result = await self.api.return_snare_info(snare_uuid) + try: + count = int(request.rel_url.query['count']) + offset = int(request.rel_url.query['offset']) + except KeyError: + # Set default values + count = 1000 + offset = 0 + + result = await self.api.return_snare_info(snare_uuid, count, offset) response_msg = self._make_response(result) return web.json_response(response_msg) @@ -65,9 +73,7 @@ async def handle_sessions(self, request): result = 'Invalid filter definition' else: sessions = await self.api.return_sessions(applied_filters) - sess_uuids = [sess['id'] for sess in sessions] - result = sess_uuids - response_msg = self._make_response(result) + response_msg = self._make_response(sessions) return web.json_response(response_msg) async def handle_session_info(self, request):
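Patches 16 and 17 together make the snare-info endpoint pageable. A hedged usage sketch follows; the host, port, and route are assumptions for illustration, and only the count/offset query parameters with their 1000/0 defaults come from handle_snare_info above:

    import requests

    API = "http://localhost:8092"                   # hypothetical host:port
    SNARE = "1c63d3a4-2c53-4a85-b3f6-5a96a3dbbd6c"  # hypothetical snare UUID

    # Fetch sessions 100-149 for this snare; omitting the parameters
    # falls back to count=1000, offset=0 on the server side.
    resp = requests.get("%s/snare-info/%s" % (API, SNARE),  # hypothetical route
                        params={"count": 50, "offset": 100})
    print(resp.json())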
From 2de5ea475e716fddcd1c90fc709379cb0c3c7215 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sun, 21 Jun 2020 14:37:50 +0530 Subject: [PATCH 18/31] Provide all the attacks with their frequency. Also fixed the session time bug --- tanner/api/api.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 99282ee0..2286b7a2 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -57,13 +57,7 @@ async def return_snare_stats(self, snare_uuid): result = {} result["total_sessions"] = 0 result["total_duration"] = 0 - result["attack_frequency"] = { - "sqli": 0, - "lfi": 0, - "xss": 0, - "rfi": 0, - "cmd_exec": 0, - } + result["attack_frequency"] = {} async with self.pg_client.acquire() as conn: stmt = select( [PATHS.c.attack_type, func.count(PATHS.c.attack_type)] ).group_by(PATHS.c.attack_type) @@ -73,11 +67,10 @@ async def return_snare_stats(self, snare_uuid): for r in rows: result["total_sessions"] += r[1] attack_type = AttackType(r[0]).name - if attack_type in result["attack_frequency"]: - result["attack_frequency"][attack_type] += 1 + result["attack_frequency"][attack_type] = r[1] time_stmt = select( - [func.sum(SESSIONS.c.start_time - SESSIONS.c.end_time)] + [func.sum(SESSIONS.c.end_time - SESSIONS.c.start_time)] ).where(SESSIONS.c.sensor_id == snare_uuid) times = await (await conn.execute(time_stmt)).fetchall() @@ -119,7 +112,7 @@ async def return_session_info(self, sess_uuid): all_paths = [] for p in paths: all_paths.append(dumps(dict(p), cls=AlchemyEncoder)) - session["paths"] = all_cookies + session["paths"] = all_paths owners_query = select([OWNERS]).where( OWNERS.c.session_id == session.get("id") ) From 886a59b975af6aa944843a25f183fc88243a979d Mon Sep 17 00:00:00 2001 From: mzfr Date: Sun, 21 Jun 2020 17:28:21 +0530 Subject: [PATCH 19/31] Remove print statements --- tanner/dbutils.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index f6a72502..c38dd4fc 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -121,13 +121,11 @@ def time_convertor(time): referer=session["referer"], ) - print("Inserted sessions") for k, v in session["cookies"].items(): await conn.execute( COOKIES.insert(), session_id=session["sess_uuid"], key=k, value=v ) - print("Inserted Cookies") for path in session["paths"]: timestamp = time_convertor(path["timestamp"]) @@ -139,7 +137,6 @@ def time_convertor(time): attack_type=AttackType[path["attack_type"]].value, ) - print("Inserted Paths") for k, v in session["possible_owners"].items(): await conn.execute( @@ -148,8 +145,6 @@ def time_convertor(time): ) ) - print("Inserted Owners") - await conn.close() except psycopg2.ProgrammingError as pg_error: logger.exception( "Error with Postgres. Session not added to postgres: %s", pg_error, ) From fedd0d8502859f1c0a458aca821dbb4bd4ae1d30 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sun, 21 Jun 2020 20:06:53 +0530 Subject: [PATCH 20/31] Fix the filtering bug Instead of using if/elif we have to use if so each condition gets checked every time --- tanner/api/api.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 2286b7a2..01858ba3 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -216,17 +216,17 @@ def apply_filters(self, filters): tables += ", paths P" columns += ", P.session_id" where += " AND P.attack_type=%s"%(filters["attack_type"]) - elif "owners" in filters: + if "owners" in filters: tables += ", owners O" columns += ", O.session_id" where += " AND O.owner_type='%s'"%(filters["owners"]) - elif "start_time" in filters: + if "start_time" in filters: where += " AND S.start_time=%s"%(filters["start_time"]) - elif "end_time" in filters: + if "end_time" in filters: where += " AND S.end_time=%s"%(filters["end_time"])
- elif "peer_ip" in filters: + if "peer_ip" in filters: where += " ANDS.ip='%s'"%(filters["peer_ip"]) - elif "user_agent" in filters: + if "user_agent" in filters: where += " AND S.user_agent='%s'"%(filters["user_agent"]) stmt = "SELECT %s FROM %s WHERE %s"%(columns, tables, where) From 368453d69ef68f8d9a9751c31be8ff3cbdcd8d64 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 27 Jun 2020 14:23:45 +0530 Subject: [PATCH 21/31] Fix a bug There was a small bug with the paths that were being added to the redis and that were used to set attack_type, resulted in lesser number of sessions to be analyzed --- tanner/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tanner/server.py b/tanner/server.py index 9afc8732..e34bd4d0 100644 --- a/tanner/server.py +++ b/tanner/server.py @@ -66,7 +66,7 @@ async def handle_event(self, request): self.logger.info("Requested path %s", path) await self.dorks.extract_path(path, self.redis_client) detection = await self.base_handler.handle(data, session) - session.set_attack_type(path, detection["name"]) + session.set_attack_type(data['path'], detection["name"]) response_msg = self._make_response( msg=dict(detection=detection, sess_uuid=session.get_uuid()) From 0abfa111abb0ef4b5f411ef8a230cb719f4fe0ff Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 27 Jun 2020 14:24:29 +0530 Subject: [PATCH 22/31] Update the API to return error as well useful while write tests --- tanner/api/api.py | 50 ++++++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 01858ba3..60a37b0e 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -91,7 +91,7 @@ async def return_session_info(self, sess_uuid): try: UUID(sess_uuid) except ValueError: - return {"Invalid SESSOIN UUID"} + return {"Invalid SESSION UUID"} async with self.pg_client.acquire() as conn: stmt = select([SESSIONS]).where(SESSIONS.c.id == sess_uuid) @@ -173,15 +173,33 @@ async def return_sessions(self, filters): [list]: list of sessions """ results = [] - stmt = self.apply_filters(filters) - async with self.pg_client.acquire() as conn: - query = await (await conn.execute(stmt)).fetchall() + invalid_filters = [] + filter_list = [ + "attack_type", + "owners", + "start_time", + "end_time", + "peer_ip", + "user_agent", + "sensor_id", + ] + + for fil in filters: + if fil not in filter_list: + invalid_filters.append(fil) + + if invalid_filters: + results = "Invalid filters" + else: + stmt = self.apply_filters(filters) + async with self.pg_client.acquire() as conn: + query = await (await conn.execute(stmt)).fetchall() - for row in query: - results.append(str(row[0])) - - return list(set(results)) + for row in query: + results.append(str(row[0])) + results = list(set(results)) + return results async def return_latest_session(self): latest_time = -1 @@ -210,24 +228,24 @@ def apply_filters(self, filters): """ tables = "sessions S" columns = "S.id" - where = "S.sensor_id='%s'"%(filters["sensor_id"]) + where = "S.sensor_id='%s'" % (filters["sensor_id"]) if "attack_type" in filters: tables += ", paths P" columns += ", P.session_id" - where += " AND P.attack_type=%s"%(filters["attack_type"]) + where += " AND P.attack_type=%s" % (filters["attack_type"]) if "owners" in filters: tables += ", owners O" columns += ", O.session_id" - where += " AND O.owner_type='%s'"%(filters["owners"]) + where += " AND O.owner_type='%s'" % (filters["owners"]) if "start_time" in filters: - where += " AND S.start_time=%s"%(filters["start_time"]) + 
where += " AND S.start_time=%s" % (filters["start_time"]) if "end_time" in filters: - where += " AND S.end_time=%s"%(filters["end_time"]) + where += " AND S.end_time=%s" % (filters["end_time"]) if "peer_ip" in filters: - where += " ANDS.ip='%s'"%(filters["peer_ip"]) + where += " ANDS.ip='%s'" % (filters["peer_ip"]) if "user_agent" in filters: - where += " AND S.user_agent='%s'"%(filters["user_agent"]) + where += " AND S.user_agent='%s'" % (filters["user_agent"]) - stmt = "SELECT %s FROM %s WHERE %s"%(columns, tables, where) + stmt = "SELECT %s FROM %s WHERE %s" % (columns, tables, where) return stmt From 997313eac2fce78ebce2715efe78df87c460ddb3 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 27 Jun 2020 14:24:46 +0530 Subject: [PATCH 23/31] Properly format the code --- tanner/dbutils.py | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index c38dd4fc..7275539e 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -3,8 +3,16 @@ from datetime import datetime import psycopg2 -from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String, Table, - insert, inspect) +from sqlalchemy import ( + Column, + ForeignKey, + Integer, + MetaData, + String, + Table, + insert, + inspect, +) from sqlalchemy.dialects.postgresql import FLOAT, INET, TIMESTAMP, UUID from sqlalchemy.sql.ddl import CreateTable @@ -105,14 +113,17 @@ def time_convertor(time): async with pg_client.acquire() as conn: await conn.execute( SESSIONS.insert(), - id=session["sess_uuid"], sensor_id=session["snare_uuid"], - ip=session["peer_ip"], port=session["peer_port"], + id=session["sess_uuid"], + sensor_id=session["snare_uuid"], + ip=session["peer_ip"], + port=session["peer_port"], country=session["location"]["country"], country_code=session["location"]["country_code"], city=session["location"]["city"], zip_code=session["location"]["zip_code"], user_agent=session["user_agent"], - start_time=start_time, end_time=end_time, + start_time=start_time, + end_time=end_time, rps=session["requests_in_second"], atbr=session["approx_time_between_requests"], accepted_paths=session["accepted_paths"], @@ -124,7 +135,9 @@ def time_convertor(time): for k, v in session["cookies"].items(): await conn.execute( COOKIES.insert(), - session_id=session["sess_uuid"], key=k, value=v + session_id=session["sess_uuid"], + key=k, + value=v, ) for path in session["paths"]: @@ -132,11 +145,11 @@ def time_convertor(time): await conn.execute( PATHS.insert(), session_id=session["sess_uuid"], - path=path["path"], created_at=timestamp, + path=path["path"], + created_at=timestamp, response_status=path["response_status"], attack_type=AttackType[path["attack_type"]].value, - ) - + ) for k, v in session["possible_owners"].items(): await conn.execute( @@ -145,9 +158,7 @@ def time_convertor(time): ) ) - except psycopg2.ProgrammingError as pg_error: logger.exception( - "Error with Postgres. Session not added to postgres: %s", - pg_error, + "Error with Postgres. 
From 997313eac2fce78ebce2715efe78df87c460ddb3 Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 27 Jun 2020 14:24:46 +0530 Subject: [PATCH 23/31] Properly format the code --- tanner/dbutils.py | 35 +++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index c38dd4fc..7275539e 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -3,8 +3,16 @@ from datetime import datetime import psycopg2 -from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String, Table, - insert, inspect) +from sqlalchemy import ( + Column, + ForeignKey, + Integer, + MetaData, + String, + Table, + insert, + inspect, +) from sqlalchemy.dialects.postgresql import FLOAT, INET, TIMESTAMP, UUID from sqlalchemy.sql.ddl import CreateTable @@ -105,14 +113,17 @@ def time_convertor(time): async with pg_client.acquire() as conn: await conn.execute( SESSIONS.insert(), - id=session["sess_uuid"], sensor_id=session["snare_uuid"], - ip=session["peer_ip"], port=session["peer_port"], + id=session["sess_uuid"], + sensor_id=session["snare_uuid"], + ip=session["peer_ip"], + port=session["peer_port"], country=session["location"]["country"], country_code=session["location"]["country_code"], city=session["location"]["city"], zip_code=session["location"]["zip_code"], user_agent=session["user_agent"], - start_time=start_time, end_time=end_time, + start_time=start_time, + end_time=end_time, rps=session["requests_in_second"], atbr=session["approx_time_between_requests"], accepted_paths=session["accepted_paths"], @@ -124,7 +135,9 @@ def time_convertor(time): for k, v in session["cookies"].items(): await conn.execute( COOKIES.insert(), - session_id=session["sess_uuid"], key=k, value=v + session_id=session["sess_uuid"], + key=k, + value=v, ) for path in session["paths"]: @@ -132,11 +145,11 @@ def time_convertor(time): await conn.execute( PATHS.insert(), session_id=session["sess_uuid"], - path=path["path"], created_at=timestamp, + path=path["path"], + created_at=timestamp, response_status=path["response_status"], attack_type=AttackType[path["attack_type"]].value, - ) - + ) for k, v in session["possible_owners"].items(): await conn.execute( @@ -148,9 +158,7 @@ def time_convertor(time): ) ) - except psycopg2.ProgrammingError as pg_error: logger.exception( - "Error with Postgres. Session not added to postgres: %s", pg_error, ) From 0fee2265fefcbc65f4f49b8e1c30fc50adb9fa8e Mon Sep 17 00:00:00 2001 From: mzfr Date: Tue, 30 Jun 2020 18:18:37 +0530 Subject: [PATCH 24/31] use urllib instead of yarl to extract queries from path --- tanner/emulators/base.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/tanner/emulators/base.py b/tanner/emulators/base.py index 9a141e0f..09741b66 100644 --- a/tanner/emulators/base.py +++ b/tanner/emulators/base.py @@ -1,6 +1,6 @@ import mimetypes import re -import urllib.parse +from urllib.parse import unquote, parse_qsl, urlparse import yarl from tanner import __version__ as tanner_version @@ -52,14 +52,11 @@ def extract_get_data(self, path): """ Return all the GET parameter :param path (str): The URL path from which GET parameters are to be extracted - :return: A MultiDictProxy object containg name and value of parameters + :return: A dict containing name and value of parameters """ - path = urllib.parse.unquote(path) - encodings = [('&&', '%26%26'), (';', '%3B')] - for value, encoded_value in encodings: - path = path.replace(value, encoded_value) - get_data = yarl.URL(path).query - return get_data + path = urlparse(path) + queries = parse_qsl(unquote(path.query)) + return dict(queries) async def get_emulation_result(self, session, data, target_emulators): """ From 0b3a09a68a92198b648f3152ef3821b7d503ed3d Mon Sep 17 00:00:00 2001 From: mzfr Date: Tue, 30 Jun 2020 18:45:04 +0530 Subject: [PATCH 25/31] Add unknown to attack type --- tanner/utils/attack_type.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tanner/utils/attack_type.py b/tanner/utils/attack_type.py index 8ec4744e..1a34bfcb 100644 --- a/tanner/utils/attack_type.py +++ b/tanner/utils/attack_type.py @@ -3,6 +3,7 @@ @unique class AttackType(Enum): + unknown = 0 rfi = 1 lfi = 2 xss = 3 From 43d7c82e912e192ba724d822d0cace6a0248bb54 Mon Sep 17 00:00:00 2001 From: mzfr Date: Tue, 30 Jun 2020 21:35:56 +0530 Subject: [PATCH 26/31] if attack_type is not present then pass 0 (unknown) This should only happen if tanner shuts down right after the request is sent --- tanner/dbutils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tanner/dbutils.py b/tanner/dbutils.py index 7275539e..9bf44bc6 100644 --- a/tanner/dbutils.py +++ b/tanner/dbutils.py @@ -142,13 +142,17 @@ def time_convertor(time): for path in session["paths"]: timestamp = time_convertor(path["timestamp"]) + try: + attackType = AttackType[path["attack_type"]].value + except KeyError: + attackType = 0 await conn.execute( PATHS.insert(), session_id=session["sess_uuid"], path=path["path"], created_at=timestamp, response_status=path["response_status"], - attack_type=AttackType[path["attack_type"]].value, + attack_type=attackType, ) From 117d23371dcc731a07f6bbe60684b5f11eb1d700 Mon Sep 17 00:00:00 2001 From: mzfr Date: Thu, 2 Jul 2020 16:25:11 +0530 Subject: [PATCH 27/31] Update the sessions and stats postgres queries Also make attack_type filter work with names --- tanner/api/api.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 60a37b0e..9c2e94f8 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -7,6 +7,7 @@ from uuid import UUID import psycopg2 +from sqlalchemy.sql.expression import join from sqlalchemy import select, func from tanner.dbutils import COOKIES, OWNERS, PATHS, SESSIONS from tanner.utils.attack_type import AttackType @@ -62,6 +63,9 @@ async def
return_snare_stats(self, snare_uuid): stmt = select( [PATHS.c.attack_type, func.count(PATHS.c.attack_type)] ).group_by(PATHS.c.attack_type) + stmt = stmt.select_from( + join(SESSIONS, PATHS, SESSIONS.c.id == PATHS.c.session_id) + ).where(SESSIONS.c.sensor_id == snare_uuid) rows = await (await conn.execute(stmt)).fetchall() for r in rows: @@ -192,12 +196,15 @@ async def return_sessions(self, filters): results = "Invalid filters" else: stmt = self.apply_filters(filters) - async with self.pg_client.acquire() as conn: - query = await (await conn.execute(stmt)).fetchall() + if stmt != "Invalid filter value": + async with self.pg_client.acquire() as conn: + query = await (await conn.execute(stmt)).fetchall() - for row in query: - results.append(str(row[0])) - results = list(set(results)) + for row in query: + results.append(str(row[0])) + results = list(set(results)) + else: + results = stmt return results @@ -233,17 +240,24 @@ def apply_filters(self, filters): if "attack_type" in filters: tables += ", paths P" columns += ", P.session_id" - where += " AND P.attack_type=%s" % (filters["attack_type"]) + try: + attack_type = AttackType[filters["attack_type"]].value + except KeyError: + return "Invalid filter value" + + where += " AND P.attack_type=%s AND S.id=P.session_id" % (attack_type) if "owners" in filters: tables += ", owners O" columns += ", O.session_id" - where += " AND O.owner_type='%s'" % (filters["owners"]) + where += " AND O.owner_type='%s' AND S.id=O.session_id" % ( + filters["owners"] + ) if "start_time" in filters: where += " AND S.start_time=%s" % (filters["start_time"]) if "end_time" in filters: where += " AND S.end_time=%s" % (filters["end_time"]) if "peer_ip" in filters: - where += " ANDS.ip='%s'" % (filters["peer_ip"]) + where += " AND S.ip='%s'" % (filters["peer_ip"]) if "user_agent" in filters: where += " AND S.user_agent='%s'" % (filters["user_agent"]) From 340a7fa718c1dde82f9303ea805a6b8c5c8e4eff Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 4 Jul 2020 14:36:21 +0530 Subject: [PATCH 28/31] Update tannerweb and its template to display content properly --- tanner/web/server.py | 23 +++++++++----- tanner/web/templates/session.html | 51 ++++++++++++++++--------------- 2 files changed, 42 insertions(+), 32 deletions(-) diff --git a/tanner/web/server.py b/tanner/web/server.py index b9e8c709..7572849e 100644 --- a/tanner/web/server.py +++ b/tanner/web/server.py @@ -5,7 +5,7 @@ from aiohttp import web from tanner.api import api -from tanner import redis_client +from tanner import postgres_client from tanner.config import TannerConfig from tanner import __version__ as tanner_version @@ -54,21 +54,26 @@ async def handle_sessions(self, request): snare_uuid = request.match_info['snare_uuid'] page_id = int(request.match_info['page_id']) params = request.url.query - applied_filters = {'snare_uuid': snare_uuid} + applied_filters = {'sensor_id': snare_uuid} try: if 'filters' in params: for filt in params['filters'].split(): - applied_filters[filt.split(':')[0]] = filt.split(':')[1] + applied_filters[filt.split(':', 1)[0]] = filt.split(':', 1)[1] if 'start_time' in applied_filters: - applied_filters['start_time'] = float(applied_filters['start_time']) + applied_filters['start_time'] = applied_filters['start_time'] if 'end_time' in applied_filters: - applied_filters['end_time'] = float(applied_filters['end_time']) + applied_filters['end_time'] = applied_filters['end_time'] except Exception as e: self.logger.exception('Filter error : %s' % e) result = 'Invalid filter definition' else:
result = [] sessions = await self.api.return_sessions(applied_filters) - result = sessions[15 * (page_id - 1):15 * page_id] + partial_sessions = sessions[15 * (page_id - 1):15 * page_id] + for sess in partial_sessions: + info = await self.api.return_session_info(sess) + result.append({"sess_uuid": info["id"], "peer_ip": info["ip"], "possible_owners": info["owners"]}) + next_val = None pre_val = None if page_id * 15 <= len(sessions): @@ -120,8 +125,10 @@ def create_app(self, loop): def start(self): loop = asyncio.get_event_loop() - self.redis_client = loop.run_until_complete(redis_client.RedisClient.get_redis_client(poolsize=20)) - self.api = api.Api(self.redis_client) + self.pg_client = loop.run_until_complete( + postgres_client.PostgresClient().get_pg_client() + ) + self.api = api.Api(self.pg_client) app = self.create_app(loop) host = TannerConfig.get('WEB', 'host') port = int(TannerConfig.get('WEB', 'port')) diff --git a/tanner/web/templates/session.html b/tanner/web/templates/session.html index 67c996ec..acb865e7 100644 --- a/tanner/web/templates/session.html +++ b/tanner/web/templates/session.html @@ -11,19 +11,29 @@

 SESSION INFO
 
 UUID
-{{session.sess_uuid}}
+{{session.id}}
 
 IP
-{{session.peer_ip}}
-
-Location
-{{session.location}}
-
+{{session.ip}}
+
+{% if session.country and session.city and session.zip_code %}
+Country
+{{session.country}}
+
+City
+{{session.city}}
+
+Zip code
+{{session.zip_code}}
+{% endif %}
 
 Port
-{{session.peer_port}}
+{{session.port}}
 
 User Agents
@@ -31,23 +41,23 @@
 
 SESSION INFO
 
 Snare UUID
-{{session.snare_uuid}}
+{{session.sensor_id}}
 
 Start time
 {{session.start_time}}
 
 End time
 {{session.end_time}}
 
 Requests/sec
-{{session.requests_in_second}}
+{{session.rps}}
 
 Time between requests
-{{session.approx_time_between_requests}}
+{{session.atbr}}
 
 Paths
@@ -61,14 +71,7 @@
 
 SESSION INFO
 
 Hidden Links
 {{session.hidden_links}}
-
-Attacks
-
-{% for attack, v in session.attack_count.items() %}
-{{attack}} - {{v}}
-{% endfor %}
-
-
+
 Paths
@@ -92,10 +95,10 @@
 
 SESSION INFO
 
 Possible Owners
-{% for owner, cf in session.possible_owners.items() %}
-{{owner}} : {{cf}}
+{% for owner, cf in session.owners.items() %}
+{{owner}} : {{cf}}
 {% endfor %}
-{% endblock %}
\ No newline at end of file
+{% endblock %}
From 6f72b84f7e933d9785eb4ebd455e3663c552f97a Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 4 Jul 2020 14:37:47 +0530 Subject: [PATCH 29/31] Make separate query for fetching total sessions --- tanner/api/api.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 9c2e94f8..637743d1 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -69,10 +69,16 @@ async def return_snare_stats(self, snare_uuid): rows = await (await conn.execute(stmt)).fetchall() for r in rows: - result["total_sessions"] += r[1] attack_type = AttackType(r[0]).name result["attack_frequency"][attack_type] = r[1] + total_session_stmt = select( + [func.count(SESSIONS.c.id)], distinct=True + ).where(SESSIONS.c.sensor_id == snare_uuid) + total_count = await (await conn.execute(total_session_stmt)).first() + + result["total_sessions"] = total_count[0] + time_stmt = select( [func.sum(SESSIONS.c.end_time - SESSIONS.c.start_time)] ).where(SESSIONS.c.sensor_id == snare_uuid) From 815e30a740a7f0f040d5082cb5e258ab0e78b2df Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 4 Jul 2020 14:39:18 +0530 Subject: [PATCH 30/31] Update filters so api/web doesn't crash when given start_time/end_time in human readable format --- tanner/api/api.py | 24 ++++++++++++++++++++++-- tanner/api/server.py | 6 +++--- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index 637743d1..abda7c6a 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -239,6 +239,24 @@ def apply_filters(self, filters): Returns: [str]: A sql query in string format """ + + def check_time(time): + """Check the format of the time passed in filters + + Args: + time ([str]): Time in RFC format + + Returns: + [str]: Time in human readable format + """ + try: + formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S") + except ValueError: + time = time + "T00:00:00" + formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S") + + return str(formatted_time) + tables = "sessions S" columns = "S.id" where = "S.sensor_id='%s'" % (filters["sensor_id"]) @@ -259,9 +277,11 @@ def apply_filters(self, filters): filters["owners"] ) if "start_time" in filters: - where += " AND S.start_time=%s" % (filters["start_time"]) + start_time = check_time(filters["start_time"]) + where += " AND S.start_time>='%s'" % (start_time) if "end_time" in filters: - where += " AND S.end_time=%s" % (filters["end_time"]) + end_time = check_time(filters["end_time"]) + where += " AND S.end_time<='%s'" % (end_time) if "peer_ip" in filters: where += " AND S.ip='%s'" % (filters["peer_ip"]) if "user_agent" in filters: where += " AND S.user_agent='%s'" % (filters["user_agent"]) diff --git a/tanner/api/server.py b/tanner/api/server.py index 2a024cb4..e6cdac02 100644 --- a/tanner/api/server.py +++ b/tanner/api/server.py @@ -63,11 +63,11 @@ async def handle_sessions(self, request): try: if 'filters' in params: for filt in params['filters'].split(): - applied_filters[filt.split(':')[0]] = filt.split(':')[1] + applied_filters[filt.split(':', 1)[0]] = filt.split(':', 1)[1] if 'start_time' in applied_filters: - applied_filters['start_time'] = float(applied_filters['start_time']) + applied_filters['start_time'] = applied_filters['start_time'] if 'end_time' in applied_filters: - applied_filters['end_time'] = float(applied_filters['end_time']) + applied_filters['end_time'] = applied_filters['end_time'] except Exception as e: self.logger.exception('Filter error : %s' % e) result = 'Invalid filter definition'
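A quick runnable sketch of what the new check_time normalisation does with the two accepted inputs; the dates here are arbitrary examples, and the function body mirrors the helper added in patch 30 above:

    import datetime

    def check_time(time):
        # Accept either %d-%m-%YT%H:%M:%S or a bare %d-%m-%Y date,
        # padding the latter with T00:00:00 before parsing.
        try:
            formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S")
        except ValueError:
            time = time + "T00:00:00"
            formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S")
        return str(formatted_time)

    print(check_time("04-07-2020"))           # -> 2020-07-04 00:00:00
    print(check_time("04-07-2020T14:39:18"))  # -> 2020-07-04 14:39:18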
From d929b9c9a37d4c03044c99d4134605a0bf28b16c Mon Sep 17 00:00:00 2001 From: mzfr Date: Sat, 4 Jul 2020 14:41:25 +0530 Subject: [PATCH 31/31] Add exception to return session info function If someone passes a valid UUID for which no session exists, the API would crash; to prevent that I added a try/except --- tanner/api/api.py | 60 ++++++++++++++++++++++++++++------------------- 1 file changed, 36 insertions(+), 24 deletions(-) diff --git a/tanner/api/api.py b/tanner/api/api.py index abda7c6a..1088e074 100644 --- a/tanner/api/api.py +++ b/tanner/api/api.py @@ -103,36 +103,48 @@ async def return_session_info(self, sess_uuid): except ValueError: return {"Invalid SESSION UUID"} - async with self.pg_client.acquire() as conn: - stmt = select([SESSIONS]).where(SESSIONS.c.id == sess_uuid) - query = await (await conn.execute(stmt)).fetchone() - session = loads(dumps(dict(query), cls=AlchemyEncoder)) - cookies_query = select([COOKIES]).where(COOKIES.c.session_id == sess_uuid) - cookies = await (await conn.execute(cookies_query)).fetchall() - all_cookies = [] - for r in cookies: - all_cookies.append({r[1]: r[2]}) - session["cookies"] = dict(ChainMap(*all_cookies)) - paths_query = select([PATHS]).where(PATHS.c.session_id == session.get("id")) - paths = await (await conn.execute(paths_query)).fetchall() - all_paths = [] - for p in paths: - all_paths.append(dumps(dict(p), cls=AlchemyEncoder)) - session["paths"] = all_paths - owners_query = select([OWNERS]).where( - OWNERS.c.session_id == session.get("id") - ) - owners = await (await conn.execute(owners_query)).fetchall() - owner_type = [] - for o in owners: - owner_type.append({o[1]: o[2]}) - session["owners"] = dict(ChainMap(*owner_type)) + try: + async with self.pg_client.acquire() as conn: + stmt = select([SESSIONS]).where(SESSIONS.c.id == sess_uuid) + query = await (await conn.execute(stmt)).fetchone() + session = loads(dumps(dict(query), cls=AlchemyEncoder)) + cookies_query = select([COOKIES]).where( + COOKIES.c.session_id == sess_uuid + ) + cookies = await (await conn.execute(cookies_query)).fetchall() + all_cookies = [] + for r in cookies: + all_cookies.append({r[1]: r[2]}) + session["cookies"] = dict(ChainMap(*all_cookies)) + paths_query = select([PATHS]).where( + PATHS.c.session_id == session.get("id") + ) + paths = await (await conn.execute(paths_query)).fetchall() + all_paths = [] + for p in paths: + all_paths.append(dumps(dict(p), cls=AlchemyEncoder)) + session["paths"] = all_paths + owners_query = select([OWNERS]).where( + OWNERS.c.session_id == session.get("id") + ) + owners = await (await conn.execute(owners_query)).fetchall() + owner_type = [] + for o in owners: + owner_type.append({o[1]: o[2]}) + session["owners"] = dict(ChainMap(*owner_type)) + except ( + TypeError, + TimeoutError, + psycopg2.ProgrammingError, + psycopg2.OperationalError, + ): + session = {"error": "Invalid session ID"} return session
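With the series complete, sessions can be filtered by attack-type name and by human-readable times end to end. A final hedged sketch; the host, port, and route are assumptions for illustration, while the space-separated name:value filter syntax, the attack_type names, and the date format come from handle_sessions, AttackType, and check_time above:

    import requests

    API = "http://localhost:8092"                   # hypothetical host:port
    SNARE = "1c63d3a4-2c53-4a85-b3f6-5a96a3dbbd6c"  # hypothetical snare UUID

    # sensor_id is itself a valid filter name, so the snare can be pinned
    # the same way; a start_time without a time part is padded to T00:00:00.
    params = {"filters": "sensor_id:%s attack_type:lfi start_time:04-07-2020" % SNARE}
    resp = requests.get("%s/sessions" % API, params=params)  # hypothetical route
    print(resp.json())  # list of matching session UUIDs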