diff --git a/requirements.txt b/requirements.txt
index 226a84b3..f2ede718 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,3 +19,4 @@ tornado
 mako
 pyjwt
 pyyaml
+sqlalchemy
diff --git a/tanner/api/api.py b/tanner/api/api.py
index 2908f0c4..1088e074 100644
--- a/tanner/api/api.py
+++ b/tanner/api/api.py
@@ -1,96 +1,230 @@
-import json
+import datetime
 import logging
 import operator
-import aioredis
+from asyncio import TimeoutError
+from collections import ChainMap
+from json import dumps, loads, JSONEncoder
+from uuid import UUID
+
+import psycopg2
+from sqlalchemy.sql.expression import join
+from sqlalchemy import select, func
+
+from tanner.dbutils import COOKIES, OWNERS, PATHS, SESSIONS
+from tanner.utils.attack_type import AttackType
+
+
+class AlchemyEncoder(JSONEncoder):
+    def default(self, obj):
+        """JSON encoder function for SQLAlchemy special classes."""
+        if isinstance(obj, datetime.date):
+            return obj.isoformat()
+        elif isinstance(obj, UUID):
+            return str(obj)
 
 
 class Api:
-    def __init__(self, redis_client):
-        self.logger = logging.getLogger('tanner.api.Api')
-        self.redis_client = redis_client
+    def __init__(self, pg_client):
+        self.logger = logging.getLogger("tanner.api.Api")
+        self.pg_client = pg_client
 
     async def return_snares(self):
+        """Returns a list of all the snares that are
+        connected to the tanner.
+
+        Returns:
+            [list] -- List containing UUIDs of all snares
+        """
         query_res = []
-        try:
-            query_res = await self.redis_client.smembers('snare_ids', encoding='utf-8')
-        except aioredis.ProtocolError as connection_error:
-            self.logger.exception('Can not connect to redis %s', connection_error)
-        return list(query_res)
+
+        async with self.pg_client.acquire() as conn:
+            stmt = select([SESSIONS.c.sensor_id], distinct=True)
+            rows = await (await conn.execute(stmt)).fetchall()
+            for r in rows:
+                query_res.append(str(r[0]))
+
+        return query_res
 
     async def return_snare_stats(self, snare_uuid):
+        """Returns the stats of the given snare
+
+        Arguments:
+            snare_uuid {uuid} -- UUID of snare
+
+        Returns:
+            [dict] -- Dictionary containing all stats of the snare.
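+                The dict carries total_sessions, total_duration and
+                attack_frequency (session count per attack type).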
+ """ result = {} - result['total_sessions'] = 0 - result['total_duration'] = 0 - result['attack_frequency'] = {'sqli': 0, - 'lfi': 0, - 'xss': 0, - 'rfi': 0, - 'cmd_exec': 0} - - sessions = await self.return_snare_info(snare_uuid) - if sessions == 'Invalid SNARE UUID': - return result - - result['total_sessions'] = len(sessions) - for sess in sessions: - result['total_duration'] += sess['end_time'] - sess['start_time'] - for attack in sess['attack_types']: - if attack in result['attack_frequency']: - result['attack_frequency'][attack] += 1 + result["total_sessions"] = 0 + result["total_duration"] = 0 + result["attack_frequency"] = {} + async with self.pg_client.acquire() as conn: + stmt = select( + [PATHS.c.attack_type, func.count(PATHS.c.attack_type)] + ).group_by(PATHS.c.attack_type) + stmt = stmt.select_from( + join(SESSIONS, PATHS, SESSIONS.c.id == PATHS.c.session_id) + ).where(SESSIONS.c.sensor_id == snare_uuid) + rows = await (await conn.execute(stmt)).fetchall() + + for r in rows: + attack_type = AttackType(r[0]).name + result["attack_frequency"][attack_type] = r[1] + + total_session_stmt = select( + [func.count(SESSIONS.c.id)], distinct=True + ).where(SESSIONS.c.sensor_id == snare_uuid) + total_count = await (await conn.execute(total_session_stmt)).first() + + result["total_sessions"] = total_count[0] + + time_stmt = select( + [func.sum(SESSIONS.c.end_time - SESSIONS.c.start_time)] + ).where(SESSIONS.c.sensor_id == snare_uuid) + + times = await (await conn.execute(time_stmt)).fetchall() + result["total_duration"] = str(times[0][0]) return result - async def return_snare_info(self, uuid, count=-1): - query_res = [] + async def return_session_info(self, sess_uuid): + """This function returns information about single session. + + Args: + sess_uuid (str): UUID of the session for which + the information has to be returned + + Returns: + [dict]: Dictionary having infromation about the session. 
+ """ try: - query_res = await self.redis_client.zrevrangebyscore( - uuid, offset=0, count=count, encoding='utf-8' - ) - except aioredis.ProtocolError as connection_error: - self.logger.exception('Can not connect to redis %s', connection_error) - else: - if not query_res: - return 'Invalid SNARE UUID' - for (i, val) in enumerate(query_res): - query_res[i] = json.loads(val) + UUID(sess_uuid) + except ValueError: + return {"Invalid SESSION UUID"} + + try: + async with self.pg_client.acquire() as conn: + stmt = select([SESSIONS]).where(SESSIONS.c.id == sess_uuid) + query = await (await conn.execute(stmt)).fetchone() + session = loads(dumps(dict(query), cls=AlchemyEncoder)) + + cookies_query = select([COOKIES]).where( + COOKIES.c.session_id == sess_uuid + ) + cookies = await (await conn.execute(cookies_query)).fetchall() + + all_cookies = [] + for r in cookies: + all_cookies.append({r[1]: r[2]}) + session["cookies"] = dict(ChainMap(*all_cookies)) + + paths_query = select([PATHS]).where( + PATHS.c.session_id == session.get("id") + ) + paths = await (await conn.execute(paths_query)).fetchall() + + all_paths = [] + for p in paths: + all_paths.append(dumps(dict(p), cls=AlchemyEncoder)) + session["paths"] = all_paths + + owners_query = select([OWNERS]).where( + OWNERS.c.session_id == session.get("id") + ) + owners = await (await conn.execute(owners_query)).fetchall() + + owner_type = [] + for o in owners: + owner_type.append({o[1]: o[2]}) + session["owners"] = dict(ChainMap(*owner_type)) + except ( + TypeError, + TimeoutError, + psycopg2.ProgrammingError, + psycopg2.OperationalError, + ): + session = {"error": "Invalid session ID"} + + return session + + async def return_snare_info(self, uuid, count, offset): + """Returns JSON data that contains information about + all the sessions a single snare instance have. + + Arguments: + uuid [string] - Snare UUID + """ + try: + # generates a ValueError if invalid UUID is given + UUID(uuid) + + query_res = [] + async with self.pg_client.acquire() as conn: + stmt = ( + select([SESSIONS]) + .where(SESSIONS.c.sensor_id == uuid) + .offset(offset) + .limit(count) + ) + query = await (await conn.execute(stmt)).fetchall() + + for row in query: + session = loads(dumps(dict(row), cls=AlchemyEncoder)) + session_info = await self.return_session_info(session.get("id")) + query_res.append(session_info) + except ( + ValueError, + TimeoutError, + psycopg2.ProgrammingError, + psycopg2.OperationalError, + ): + query_res = "Invalid SNARE UUID" + return query_res - async def return_session_info(self, sess_uuid, snare_uuid=None): - if snare_uuid: - snare_uuids = [snare_uuid] + async def return_sessions(self, filters): + """Returns the list of all the sessions. + Uses apply_filters function in this class + to make the query accordingly. 
+
+        Args:
+            filters (dict): all the filters that are to be applied
+
+        Returns:
+            [list]: list of sessions
+        """
+        results = []
+        invalid_filters = []
+        filter_list = [
+            "attack_type",
+            "owners",
+            "start_time",
+            "end_time",
+            "peer_ip",
+            "user_agent",
+            "sensor_id",
+        ]
+
+        for fil in filters:
+            if fil not in filter_list:
+                invalid_filters.append(fil)
+
+        if invalid_filters:
+            results = "Invalid filters"
         else:
-            snare_uuids = await self.return_snares()
+            stmt = self.apply_filters(filters)
+            if stmt != "Invalid filter value":
+                async with self.pg_client.acquire() as conn:
+                    query = await (await conn.execute(stmt)).fetchall()
 
-        for snare_id in snare_uuids:
-            sessions = await self.return_snare_info(snare_id)
-            if sessions == 'Invalid SNARE UUID':
-                continue
-            for sess in sessions:
-                if sess['sess_uuid'] == sess_uuid:
-                    return sess
+                for row in query:
+                    results.append(str(row[0]))
+                results = list(set(results))
+            else:
+                results = stmt
 
-    async def return_sessions(self, filters):
-        snare_uuids = await self.return_snares()
-
-        matching_sessions = []
-        for snare_id in snare_uuids:
-            result = await self.return_snare_info(snare_id)
-            if result == 'Invalid SNARE UUID':
-                return 'Invalid filter : SNARE UUID'
-            sessions = result
-            for sess in sessions:
-                match_count = 0
-                for filter_name, filter_value in filters.items():
-                    try:
-                        if self.apply_filter(filter_name, filter_value, sess):
-                            match_count += 1
-                    except KeyError:
-                        return 'Invalid filter : %s' % filter_name
-
-                if match_count == len(filters):
-                    matching_sessions.append(sess)
-
-        return matching_sessions
+        return results
 
     async def return_latest_session(self):
         latest_time = -1
@@ -98,31 +232,72 @@ async def return_latest_session(self):
         snares = await self.return_snares()
         try:
             for snare in snares:
-                filters = {'snare_uuid': snare}
+                filters = {"sensor_id": snare}
                 sessions = await self.return_sessions(filters)
                 for session in sessions:
-                    if latest_time < session['end_time']:
-                        latest_time = session['end_time']
-                        latest_session = session['sess_uuid']
+                    if latest_time < session["end_time"]:
+                        latest_time = session["end_time"]
+                        latest_session = session["sess_uuid"]
         except TypeError:
             return None
         return latest_session
 
-    def apply_filter(self, filter_name, filter_value, sess):
-        available_filters = {'user_agent': operator.contains,
-                             'peer_ip': operator.eq,
-                             'attack_types': operator.contains,
-                             'possible_owners': operator.contains,
-                             'start_time': operator.le,
-                             'end_time': operator.ge,
-                             'snare_uuid': operator.eq,
-                             'location': operator.contains
-                             }
+    def apply_filters(self, filters):
+        """Builds a SQL query according to the given filters
 
-        try:
-            if available_filters[filter_name] is operator.contains:
-                return available_filters[filter_name](sess[filter_name], filter_value)
-            else:
-                return available_filters[filter_name](filter_value, sess[filter_name])
-        except KeyError:
-            raise
+        Args:
+            filters (dict): all the filters that are to be applied
+
+        Returns:
+            [str]: A SQL query in string format
+        """
+
+        def check_time(time):
+            """Check the format of the time passed in filters
+
+            Args:
+                time ([str]): Time in RFC format
+
+            Returns:
+                [str]: Time in human readable format
+            """
+            try:
+                formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S")
+            except ValueError:
+                time = time + "T00:00:00"
+                formatted_time = datetime.datetime.strptime(time, "%d-%m-%YT%H:%M:%S")
+
+            return str(formatted_time)
+
+        tables = "sessions S"
+        columns = "S.id"
+        where = "S.sensor_id='%s'" % (filters["sensor_id"])
+
+        if "attack_type" in filters:
+            tables += ", paths P"
+            columns += ", P.session_id"
+            try:
+                attack_type = AttackType[filters["attack_type"]].value
+            except KeyError:
+                return "Invalid filter value"
+
+            where += " AND P.attack_type=%s AND S.id=P.session_id" % (attack_type)
+        if "owners" in filters:
+            tables += ", owners O"
+            columns += ", O.session_id"
+            where += " AND O.owner_type='%s' AND S.id=O.session_id" % (
+                filters["owners"]
+            )
+        if "start_time" in filters:
+            start_time = check_time(filters["start_time"])
+            where += " AND S.start_time>='%s'" % (start_time)
+        if "end_time" in filters:
+            end_time = check_time(filters["end_time"])
+            where += " AND S.end_time<='%s'" % (end_time)
+        if "peer_ip" in filters:
+            where += " AND S.ip='%s'" % (filters["peer_ip"])
+        if "user_agent" in filters:
+            where += " AND S.user_agent='%s'" % (filters["user_agent"])
+
+        stmt = "SELECT %s FROM %s WHERE %s" % (columns, tables, where)
+        return stmt
diff --git a/tanner/api/server.py b/tanner/api/server.py
index ccc62bad..e6cdac02 100644
--- a/tanner/api/server.py
+++ b/tanner/api/server.py
@@ -5,7 +5,7 @@
 from aiohttp.web import middleware
 
 from tanner.api import api
-from tanner import redis_client
+from tanner import postgres_client
 from tanner.config import TannerConfig
 from tanner.utils.api_key_generator import generate
 
@@ -38,7 +38,15 @@ async def handle_snares(self, request):
 
     async def handle_snare_info(self, request):
         snare_uuid = request.match_info['snare_uuid']
-        result = await self.api.return_snare_info(snare_uuid, 50)
+        try:
+            count = int(request.rel_url.query['count'])
+            offset = int(request.rel_url.query['offset'])
+        except (KeyError, ValueError):
+            # set default pagination values when params are absent or malformed
+            count = 1000
+            offset = 0
+
+        result = await self.api.return_snare_info(snare_uuid, count, offset)
         response_msg = self._make_response(result)
         return web.json_response(response_msg)
 
@@ -51,23 +59,21 @@ async def handle_snare_stats(self, request):
 
     async def handle_sessions(self, request):
         snare_uuid = request.match_info['snare_uuid']
         params = request.url.query
-        applied_filters = {'snare_uuid': snare_uuid}
+        applied_filters = {'sensor_id': snare_uuid}
         try:
             if 'filters' in params:
                 for filt in params['filters'].split():
-                    applied_filters[filt.split(':')[0]] = filt.split(':')[1]
-                if 'start_time' in applied_filters:
-                    applied_filters['start_time'] = float(applied_filters['start_time'])
-                if 'end_time' in applied_filters:
-                    applied_filters['end_time'] = float(applied_filters['end_time'])
+                    applied_filters[filt.split(':', 1)[0]] = filt.split(':', 1)[1]
         except Exception as e:
             self.logger.exception('Filter error : %s' % e)
             result = 'Invalid filter definition'
+            response_msg = self._make_response(result)
         else:
             sessions = await self.api.return_sessions(applied_filters)
-            sess_uuids = [sess['sess_uuid'] for sess in sessions]
-            result = sess_uuids
-            response_msg = self._make_response(result)
+            response_msg = self._make_response(sessions)
         return web.json_response(response_msg)
 
     async def handle_session_info(self, request):
@@ -77,7 +83,7 @@ async def handle_session_info(self, request):
         return web.json_response(response_msg)
 
     async def on_shutdown(self, app):
-        self.redis_client.close()
+        self.pg_client.close()
 
     @middleware
     async def auth(self, request, handler):
@@ -108,8 +114,10 @@ def create_app(self, loop, auth=False):
 
     def start(self):
         loop = asyncio.get_event_loop()
-        self.redis_client = loop.run_until_complete(redis_client.RedisClient.get_redis_client(poolsize=20))
-        self.api = api.Api(self.redis_client)
+        self.pg_client = loop.run_until_complete(
+            postgres_client.PostgresClient().get_pg_client()
+        )
+        self.api = api.Api(self.pg_client)
         set_auth = TannerConfig.get('API', 'auth')
         app = self.create_app(loop, set_auth)
         host = TannerConfig.get('API', 'host')
diff --git a/tanner/dbutils.py b/tanner/dbutils.py
index c1f07685..9bf44bc6 100644
--- a/tanner/dbutils.py
+++ b/tanner/dbutils.py
@@ -1,10 +1,72 @@
 import asyncio
-
 import logging
-import psycopg2
 from datetime import datetime
+
+import psycopg2
+from sqlalchemy import (
+    Column,
+    ForeignKey,
+    Integer,
+    MetaData,
+    String,
+    Table,
+    insert,
+    inspect,
+)
+from sqlalchemy.dialects.postgresql import FLOAT, INET, TIMESTAMP, UUID
+from sqlalchemy.sql.ddl import CreateTable
+
 from tanner.utils.attack_type import AttackType
 
+meta = MetaData()
+SESSIONS = Table(
+    "sessions",
+    meta,
+    Column("id", UUID(as_uuid=True), primary_key=True, unique=True),
+    Column(
+        "sensor_id", UUID(as_uuid=True), primary_key=True, index=True, nullable=False
+    ),
+    Column("ip", INET, nullable=False),
+    Column("port", Integer, nullable=False),
+    Column("country", String, nullable=True),
+    Column("country_code", String, nullable=True),
+    Column("city", String, nullable=True),
+    Column("zip_code", Integer, nullable=True),
+    Column("user_agent", String, nullable=False),
+    Column("start_time", TIMESTAMP, nullable=False),
+    Column("end_time", TIMESTAMP, nullable=False),
+    Column("rps", FLOAT, nullable=False, comment="requests per second"),
+    Column("atbr", FLOAT, nullable=False, comment="approx_time_between_requests"),
+    Column("accepted_paths", Integer, nullable=False),
+    Column("errors", Integer, nullable=False),
+    Column("hidden_links", Integer, nullable=False),
+    Column("referer", String),
+)
+
+PATHS = Table(
+    "paths",
+    meta,
+    Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True),
+    Column("path", String, nullable=False),
+    Column("created_at", TIMESTAMP),
+    Column("response_status", Integer, nullable=False),
+    Column("attack_type", Integer, nullable=False),
+)
+COOKIES = Table(
+    "cookies",
+    meta,
+    Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True),
+    Column("key", String),
+    Column("value", String),
+)
+OWNERS = Table(
+    "owners",
+    meta,
+    Column("session_id", UUID(as_uuid=True), ForeignKey("sessions.id"), index=True),
+    Column("owner_type", String),
+    Column("probability", FLOAT),
+)
+
 
 class DBUtils:
     @staticmethod
@@ -13,76 +75,16 @@ async def create_data_tables(pg_client):
         the postgres database
 
         Arguments:
-            pg_client {aiopg.pool.Pool}
+            pg_client {aiopg.sa.engine.Engine}
         """
-        async with pg_client.acquire() as conn:
-            async with conn.cursor() as cur:
-
-                await cur.execute(
-                    """
-                    CREATE TABLE IF NOT EXISTS "sessions" (
-                        "id" UUID PRIMARY KEY,
-                        "sensor_id" UUID NOT NULL,
-                        "ip" INET NOT NULL,
-                        "port" INT NOT NULL,
-                        "country" TEXT NULL,
-                        "country_code" TEXT NULL,
-                        "city" TEXT NULL,
-                        "zip_code" INT NULL,
-                        "user_agent" TEXT NOT NULL,
-                        "start_time" TIMESTAMP DEFAULT NOW(),
-                        "end_time" TIMESTAMP DEFAULT NOW(),
-                        "rps" FLOAT NOT NULL,
-                        "atbr" FLOAT NOT NULL,
-                        "accepted_paths" INT NOT NULL,
-                        "errors" INT NOT NULL,
-                        "hidden_links" INT NOT NULL,
-                        "referer" TEXT NOT NULL
-                    )
-                    """
-                )
-                await cur.execute(
-                    """
-                    CREATE TABLE IF NOT EXISTS "paths" (
-                        "session_id" UUID REFERENCES sessions(id),
-                        "path" TEXT NOT NULL,
-                        "created_at" TIMESTAMP DEFAULT now(),
-                        "response_status" INT NOT NULL,
-                        "attack_type" INT NOT NULL
-                    )
-                    """
-                )
+        tables = [SESSIONS, PATHS, COOKIES, OWNERS]
 
-                await cur.execute(
-                    """
-                    CREATE TABLE IF NOT EXISTS "cookies" (
-                        "session_id" UUID REFERENCES sessions(id),
-                        "key" TEXT NULL,
-                        "value" TEXT NULL
-                    )
-                    """
-                )
-                await cur.execute(
-                    """
-                    CREATE TABLE IF NOT EXISTS "owners" (
-                        "session_id" UUID REFERENCES sessions(id),
-                        "owner_type" TEXT,
-                        "probability" FLOAT
-                    )
-                    """
-                )
-                await cur.execute(
-                    "comment on column sessions.rps is 'requests per second'"
-                )
-                await cur.execute(
-                    "comment on column sessions.atbr is 'approx_time_between_requests'"
-                )
-                await cur.execute("CREATE INDEX ON sessions(sensor_id)")
-                await cur.execute('CREATE INDEX ON "paths"(session_id)')
-                await cur.execute('CREATE INDEX ON "cookies"(session_id)')
-                await cur.execute('CREATE INDEX ON "owners"(session_id)')
-            cur.close()
-            conn.close()
+        async with pg_client.acquire() as conn:
+            for table in tables:
+                try:
+                    await conn.execute(CreateTable(table))
+                except psycopg2.errors.DuplicateTable:
+                    continue
 
     @staticmethod
     async def add_analyzed_data(session, pg_client):
@@ -90,7 +92,7 @@ async def add_analyzed_data(session, pg_client):
 
         Arguments:
             session {dict} -- dictionary having all the session details
-            pg_client {aiopg.pool.Pool}
+            pg_client {aiopg.sa.engine.Engine}
         """
 
         def time_convertor(time):
@@ -100,25 +102,7 @@ def time_convertor(time):
 
             Arguments:
                 time {str} -- time in epoch format
             """
-            return datetime.fromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')
-
-        Cookies = "INSERT INTO cookies(session_id, key, value) VALUES('{uuid}', '{key}', '{value}');"
-        Sessions = (
-            "INSERT INTO sessions (id, sensor_id, ip, port, country,"
-            "country_code, city, zip_code, user_agent, start_time,"
-            "end_time, rps, atbr, accepted_paths, errors, hidden_links, referer) "
-            "VALUES ('{uuid}','{sensor}','{ip}',{port},'{country}',"
-            "'{ccode}','{city}',{zcode},'{ua}','{st}','{et}',{rps},"
-            "{atbr},{apaths},{err},{hlinks},'{referer}');"
-        )
-        Paths = (
-            "INSERT INTO paths (session_id, path, created_at, response_status, attack_type) "
-            "VALUES ('{id}','{path}','{time}',{res},{atype});"
-        )
-        Owners = (
-            "INSERT INTO owners (session_id, owner_type, probability) "
-            "VALUES ('{id}', '{key}', {val});"
-        )
+            return datetime.fromtimestamp(time).strftime("%Y-%m-%d %H:%M:%S")
 
         start_time = time_convertor(session["start_time"])
         end_time = time_convertor(session["end_time"])
@@ -126,54 +110,59 @@ def time_convertor(time):
 
         logger = logging.getLogger(__name__)
 
         try:
-            sessions_query = Sessions.format(
-                uuid=session["sess_uuid"],
-                sensor=session["snare_uuid"],
-                ip=session["peer_ip"],
-                port=session["peer_port"],
-                country=session["location"]["country"],
-                ccode=session["location"]["country_code"],
-                city=session["location"]["city"],
-                zcode=session["location"]["zip_code"],
-                ua=session["user_agent"],
-                st=start_time,
-                et=end_time,
-                rps=session["requests_in_second"],
-                atbr=session["approx_time_between_requests"],
-                apaths=session["accepted_paths"],
-                err=session["errors"],
-                hlinks=session["hidden_links"],
-                referer=session["referer"]
-            )
             async with pg_client.acquire() as conn:
-                async with conn.cursor() as cur:
-                    await cur.execute(sessions_query)
-                    for k, v in session["cookies"].items():
-                        await cur.execute(
-                            Cookies.format(uuid=session["sess_uuid"], key=k, value=v)
-                        )
-
-                    for path in session["paths"]:
-                        timestamp = time_convertor(path["timestamp"])
-                        paths_query = Paths.format(
-                            id=session["sess_uuid"],
-                            path=path["path"],
-                            time=timestamp,
-                            res=path["response_status"],
-                            atype=AttackType[path["attack_type"]].value
-                        )
+                await conn.execute(
+                    SESSIONS.insert(),
+                    id=session["sess_uuid"],
+                    sensor_id=session["snare_uuid"],
+                    ip=session["peer_ip"],
+                    port=session["peer_port"],
+                    country=session["location"]["country"],
+                    country_code=session["location"]["country_code"],
+                    city=session["location"]["city"],
+                    zip_code=session["location"]["zip_code"],
+                    user_agent=session["user_agent"],
+                    start_time=start_time,
+                    end_time=end_time,
+                    rps=session["requests_in_second"],
+                    atbr=session["approx_time_between_requests"],
+                    accepted_paths=session["accepted_paths"],
+                    errors=session["errors"],
+                    hidden_links=session["hidden_links"],
+                    referer=session["referer"],
+                )
 
-                    await cur.execute(paths_query)
+                for k, v in session["cookies"].items():
+                    await conn.execute(
+                        COOKIES.insert(),
+                        session_id=session["sess_uuid"],
+                        key=k,
+                        value=v,
+                    )
 
-                    for k, v in session["possible_owners"].items():
-                        await cur.execute(Owners.format(id=session["sess_uuid"], key=k, val=v))
+                for path in session["paths"]:
+                    timestamp = time_convertor(path["timestamp"])
+                    try:
+                        attack_type = AttackType[path["attack_type"]].value
+                    except KeyError:
+                        # unmapped attack names fall back to AttackType.unknown
+                        attack_type = 0
+                    await conn.execute(
+                        PATHS.insert(),
+                        session_id=session["sess_uuid"],
+                        path=path["path"],
+                        created_at=timestamp,
+                        response_status=path["response_status"],
+                        attack_type=attack_type,
+                    )
 
-                    cur.close()
-                    conn.close()
+                for k, v in session["possible_owners"].items():
+                    await conn.execute(
+                        insert(OWNERS).values(
+                            session_id=session["sess_uuid"], owner_type=k, probability=v
+                        )
+                    )
         except psycopg2.ProgrammingError as pg_error:
             logger.exception(
-                "Error with Postgres. Session not added to postgres: %s",
-                pg_error,
+                "Error with Postgres. Session not added to postgres: %s", pg_error,
             )
diff --git a/tanner/emulators/base.py b/tanner/emulators/base.py
index 9a141e0f..09741b66 100644
--- a/tanner/emulators/base.py
+++ b/tanner/emulators/base.py
@@ -1,6 +1,6 @@
 import mimetypes
 import re
-import urllib.parse
+from urllib.parse import unquote, parse_qsl, urlparse
 
 import yarl
 
 from tanner import __version__ as tanner_version
@@ -52,14 +52,11 @@ def extract_get_data(self, path):
         """
         Return all the GET parameter
         :param path (str): The URL path from which GET parameters are to be extracted
-        :return: A MultiDictProxy object containg name and value of parameters
+        :return: A dict containing name and value of parameters
        """
-        path = urllib.parse.unquote(path)
-        encodings = [('&&', '%26%26'), (';', '%3B')]
-        for value, encoded_value in encodings:
-            path = path.replace(value, encoded_value)
-        get_data = yarl.URL(path).query
-        return get_data
+        path = urlparse(path)
+        queries = parse_qsl(unquote(path.query))
+        return dict(queries)
 
     async def get_emulation_result(self, session, data, target_emulators):
         """
diff --git a/tanner/postgres_client.py b/tanner/postgres_client.py
index 0a96cbfe..ab72d0f9 100644
--- a/tanner/postgres_client.py
+++ b/tanner/postgres_client.py
@@ -2,6 +2,8 @@
 import logging
 
 import aiopg
+from aiopg.sa import create_engine
+import sqlalchemy as sa
 import psycopg2
 
 from tanner.config import TannerConfig
@@ -24,12 +26,16 @@ def __init__(self):
 
     async def get_pg_client(self):
         pg_client = None
         try:
-            dsn = "dbname={} user={} password={} host={} port={}".format(
-                self.db_name, self.user, self.password, self.host, self.port
-            )
             pg_client = await asyncio.wait_for(
-                aiopg.create_pool(dsn, maxsize=self.poolsize), timeout=int(self.timeout)
+                create_engine(
+                    host=self.host,
+                    port=self.port,
+                    user=self.user,
+                    password=self.password,
+                    database=self.db_name,
+                    maxsize=self.poolsize,
+                ),
+                timeout=int(self.timeout)
             )
         except (
             asyncio.TimeoutError,
diff --git a/tanner/server.py b/tanner/server.py
index 9afc8732..e34bd4d0 100644
--- a/tanner/server.py
+++ b/tanner/server.py
@@ -66,7 +66,7 @@ async def handle_event(self, request):
         self.logger.info("Requested path %s", path)
         await self.dorks.extract_path(path, self.redis_client)
         detection = await self.base_handler.handle(data, session)
-        session.set_attack_type(path, detection["name"])
+        session.set_attack_type(data['path'], detection["name"])
 
         response_msg = self._make_response(
             msg=dict(detection=detection, sess_uuid=session.get_uuid())
diff --git a/tanner/tests/test_api_server.py b/tanner/tests/test_api_server.py
index bf359011..1e977f7b 100644
--- a/tanner/tests/test_api_server.py
+++ b/tanner/tests/test_api_server.py
@@ -10,10 +10,10 @@ class TestAPIServer(AioHTTPTestCase):
     def setUp(self):
         self.serv = server.ApiServer()
 
-        redis = mock.Mock()
-        redis.close = mock.Mock()
-        self.serv.redis_client = redis
-        self.serv.api = api.Api(self.serv.redis_client)
+        postgres = mock.Mock()
+        postgres.close = mock.Mock()
+        self.serv.pg_client = postgres
+        self.serv.api = api.Api(self.serv.pg_client)
 
         super(TestAPIServer, self).setUp()
 
@@ -49,8 +49,8 @@ async def mock_return_snares():
 
     @unittest_run_loop
     async def test_api_snare_info_request(self):
-        async def mock_return_snare_info(snare_uuid, count):
-            if snare_uuid == "8fa6aa98-4283-4085-bfb9-a1cd3a9e56e4" and count == 50:
+        async def mock_return_snare_info(snare_uuid, count, offset):
+            if snare_uuid == "8fa6aa98-4283-4085-bfb9-a1cd3a9e56e4":
                 return [{"test_sess1": "sess1_info"}, {"test_sess1": "sess2_info"}]
 
         assert_content = {"version": 1,
diff --git a/tanner/utils/attack_type.py b/tanner/utils/attack_type.py
index 8ec4744e..1a34bfcb 100644
--- a/tanner/utils/attack_type.py
+++ b/tanner/utils/attack_type.py
@@ -3,6 +3,7 @@
 
 @unique
 class AttackType(Enum):
+    unknown = 0
     rfi = 1
     lfi = 2
     xss = 3
diff --git a/tanner/web/server.py b/tanner/web/server.py
index b9e8c709..7572849e 100644
--- a/tanner/web/server.py
+++ b/tanner/web/server.py
@@ -5,7 +5,7 @@
 from aiohttp import web
 
 from tanner.api import api
-from tanner import redis_client
+from tanner import postgres_client
 from tanner.config import TannerConfig
 from tanner import __version__ as tanner_version
 
@@ -54,21 +54,26 @@ async def handle_sessions(self, request):
         snare_uuid = request.match_info['snare_uuid']
         page_id = int(request.match_info['page_id'])
         params = request.url.query
-        applied_filters = {'snare_uuid': snare_uuid}
+        applied_filters = {'sensor_id': snare_uuid}
         try:
             if 'filters' in params:
                 for filt in params['filters'].split():
-                    applied_filters[filt.split(':')[0]] = filt.split(':')[1]
-                if 'start_time' in applied_filters:
-                    applied_filters['start_time'] = float(applied_filters['start_time'])
-                if 'end_time' in applied_filters:
-                    applied_filters['end_time'] = float(applied_filters['end_time'])
+                    applied_filters[filt.split(':', 1)[0]] = filt.split(':', 1)[1]
         except Exception as e:
             self.logger.exception('Filter error : %s' % e)
             result = 'Invalid filter definition'
         else:
+            result = []
             sessions = await self.api.return_sessions(applied_filters)
-            result = sessions[15 * (page_id - 1):15 * page_id]
+            partial_sessions = sessions[15 * (page_id - 1):15 * page_id]
+            for sess in partial_sessions:
+                info = await self.api.return_session_info(sess)
+                result.append({
+                    "sess_uuid": info["id"],
+                    "peer_ip": info["ip"],
+                    "possible_owners": info["owners"],
+                })
+
         next_val = None
         pre_val = None
         if page_id * 15 <= len(sessions):
@@ -120,8 +125,10 @@ def create_app(self, loop):
 
     def start(self):
         loop = asyncio.get_event_loop()
-        self.redis_client = loop.run_until_complete(redis_client.RedisClient.get_redis_client(poolsize=20))
-        self.api = api.Api(self.redis_client)
+        self.pg_client = loop.run_until_complete(
+            postgres_client.PostgresClient().get_pg_client()
+        )
+        self.api = api.Api(self.pg_client)
         app = self.create_app(loop)
         host = TannerConfig.get('WEB', 'host')
         port = int(TannerConfig.get('WEB', 'port'))
diff --git a/tanner/web/templates/session.html b/tanner/web/templates/session.html
index 67c996ec..acb865e7 100644
--- a/tanner/web/templates/session.html
+++ b/tanner/web/templates/session.html
@@ -11,19 +11,29 @@ SESSION INFO

     <tr>
         <td>UUID</td>
-        <td>{{session.sess_uuid}}</td>
+        <td>{{session.id}}</td>
     </tr>
     <tr>
         <td>IP</td>
-        <td>{{session.peer_ip}}</td>
-    </tr>
-    <tr>
-        <td>Location</td>
-        <td>{{session.location}}</td>
-    </tr>
+        <td>{{session.ip}}</td>
+    </tr>
+    {% if session.country and session.city and session.zip_code %}
+    <tr>
+        <td>Country</td>
+        <td>{{session.country}}</td>
+    </tr>
+    <tr>
+        <td>City</td>
+        <td>{{session.city}}</td>
+    </tr>
+    <tr>
+        <td>Zip Code</td>
+        <td>{{session.zip_code}}</td>
+    </tr>
+    {% endif %}
     <tr>
         <td>Port</td>
-        <td>{{session.peer_port}}</td>
+        <td>{{session.port}}</td>
     </tr>
     <tr>
         <td>User Agents</td>
@@ -31,23 +41,23 @@ SESSION INFO
     <tr>
         <td>Snare UUID</td>
-        <td>{{session.snare_uuid}}</td>
+        <td>{{session.sensor_id}}</td>
     </tr>
     <tr>
         <td>Start time</td>
-        <td>{{session.start_time}}</td>
+        <td>{{session.start_time}}</td>
     </tr>
     <tr>
         <td>End time</td>
-        <td>{{session.end_time}}</td>
+        <td>{{session.end_time}}</td>
     </tr>
     <tr>
         <td>Requests/sec</td>
-        <td>{{session.requests_in_second}}</td>
+        <td>{{session.rps}}</td>
     </tr>
     <tr>
         <td>Time between requests</td>
-        <td>{{session.approx_time_between_requests}}</td>
+        <td>{{session.atbr}}</td>
    </tr>
     <tr>
         <td>Paths</td>
@@ -61,14 +71,7 @@ SESSION INFO
     <tr>
         <td>Hidden Links</td>
         <td>{{session.hidden_links}}</td>
     </tr>
-    <tr>
-        <td>Attacks</td>
-        <td>
-            {% for attack, v in session.attack_count.items() %}
-            {{attack}} - {{v}}<br>
-            {% endfor %}
-        </td>
-    </tr>
+
     <tr>
         <td>Paths</td>
@@ -92,10 +95,10 @@ SESSION INFO
     <tr>
         <td>Possible Owners</td>
         <td>
-            {% for owner, cf in session.possible_owners.items() %}
-            {{owner}} : {{cf}}<br>
+            {% for owner, cf in session.owners.items() %}
+            {{owner}} : {{cf}}<br>
             {% endfor %}
         </td>
     </tr>
-{% endblock %}
\ No newline at end of file
+{% endblock %}