Restore tasks of the old scheduler
m-o-d-e-r committed Jun 12, 2024
1 parent f3b6461 commit cd24ca4
Showing 3 changed files with 158 additions and 0 deletions.
46 changes: 46 additions & 0 deletions burrito/utils/tasks/new_tickets.py
@@ -0,0 +1,46 @@
import datetime

from burrito.utils.email_util import publish_email
from burrito.utils.email_templates import TEMPLATE__EMAIL_NOTIFICATION_FOR_ADMIN
from burrito.utils.query_util import STATUS_NEW
from burrito.utils.logger import get_logger

from burrito.models.tickets_model import Tickets
from burrito.models.user_model import Users


MAX_UNCHANGED_DAYS = 2


def check_for_new_tickets():
    tickets_list: list[Tickets] = Tickets.select(Tickets.ticket_id, Tickets.subject, Tickets.created).where(
        Tickets.status == STATUS_NEW
    )
    # roles 9 and 10 are treated as administrators and receive the notification
    admins_list = [
        item.user_id for item in Users.select(Users.user_id).where(
            Users.role.in_((9, 10))
        )
    ]
    tickets_info: list[str] = []

    for ticket_item in tickets_list:
        ticket_created = datetime.datetime.strptime(str(ticket_item.created), "%Y-%m-%d %H:%M:%S")

        if (datetime.datetime.now() - ticket_created).days > MAX_UNCHANGED_DAYS:
            # the email line is in Ukrainian: "Дата створення" means "Creation date"
            tickets_info.append(
                f"""
                #{ticket_item.ticket_id} "{ticket_item.subject}":
                    Дата створення: {ticket_item.created}
                """
            )

    if tickets_info:
        publish_email(
            admins_list,
            TEMPLATE__EMAIL_NOTIFICATION_FOR_ADMIN["subject"].format(days_count=MAX_UNCHANGED_DAYS),
            TEMPLATE__EMAIL_NOTIFICATION_FOR_ADMIN["content"].format(
                days_count=MAX_UNCHANGED_DAYS,
                data="".join(tickets_info)
            )
        )
    get_logger().info(f"Found {len(tickets_list)} tickets with status NEW")
14 changes: 14 additions & 0 deletions burrito/utils/tasks/ping.py
@@ -0,0 +1,14 @@
import socket

from burrito.utils.logger import get_logger


def burrito_ping(host, port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    try:
        sock.connect((host, int(port)))
    except socket.error:
        get_logger().critical(f"({host}, {port}) is unreachable")
    except Exception as exc:
        get_logger().error(exc)
    finally:
        # close the socket whether or not the connection attempt succeeded
        sock.close()
98 changes: 98 additions & 0 deletions burrito/utils/tasks/preprocessor.py
@@ -0,0 +1,98 @@
import pymysql.cursors
import orjson as json

from peewee import IntegrityError

from burrito.utils.task_manager import get_task_manager
from burrito.utils.config_reader import get_config
from burrito.utils.logger import get_logger
from burrito.utils.db_cursor_object import get_database_cursor

from burrito.models.group_model import Groups
from burrito.models.statuses_model import Statuses
from burrito.models.faculty_model import Faculties
from burrito.models.queues_model import Queues
from burrito.models.permissions_model import Permissions
from burrito.models.roles_model import Roles
from burrito.models.role_permissions_model import RolePermissions

from burrito.plugins.loader import PluginLoader


MODEL_KEYS = {
    "groups": Groups,
    "faculties": Faculties,
    "statuses": Statuses,
    "queues": Queues,
    "permissions": Permissions,
    "roles": Roles,
    "role_permissions": RolePermissions
}

DEFAULT_CONFIG = ""

with open("preprocessor_config.json", "r", encoding="utf-8") as file:
    DEFAULT_CONFIG = json.loads(file.read())


def preprocessor_task():
    get_logger().info("Preprocessor is started")

    conn = None

    try:
        conn = pymysql.connect(
            database=get_config().BURRITO_DB_NAME,
            user=get_config().BURRITO_DB_USER,
            password=get_config().BURRITO_DB_PASSWORD,
            host=get_config().BURRITO_DB_HOST,
            port=int(get_config().BURRITO_DB_PORT),
            cursorclass=pymysql.cursors.DictCursor
        )
    except Exception as e:
        get_logger().warning(e)
        return

    with conn:
        __sql_commands: dict = {}
        __config_data: dict = {}

        data: dict = DEFAULT_CONFIG
        data["groups"] = PluginLoader.execute_plugin("group_updates")
        data["faculties"] += PluginLoader.execute_plugin("faculty_updates")

        for key, value in data.items():
            if not key.startswith("__"):
                __config_data[key] = value
                continue

            if key == "__tables_option":
                __sql_commands = value

        for table, config_values in __config_data.items():
            with conn.cursor() as cursor:
                cursor.execute(__sql_commands[table])
                conn.commit()

                config_filtered_values: set = {tuple(i.values()) for i in config_values}
                db_filtered_values: set = {tuple(i.values()) for i in cursor.fetchall()}

                if config_filtered_values.difference(db_filtered_values):
                    for value in config_values:
                        try:
                            if table in ("groups", "faculties"):
                                get_task_manager().add_task(MODEL_KEYS[table].create, **value)
                            else:
                                # temporarily disable foreign key checks while inserting queue rows
                                if table == "queues":
                                    get_database_cursor().execute_sql("SET FOREIGN_KEY_CHECKS=0")
                                MODEL_KEYS[table].create(**value)
                                if table == "queues":
                                    get_database_cursor().execute_sql("SET FOREIGN_KEY_CHECKS=1")

                        except IntegrityError:  # duplicate rows are ignored on insert
                            ...

                        except Exception as e:
                            get_logger().warning(f"Preprocessor error: {e}")

    get_logger().info("Preprocessor sub-tasks pushed to task manager")
