Skip to content

Commit

Permalink
uv migration and Docker refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
leftmove committed Dec 21, 2024
1 parent b4bcee5 commit 70c3945
Show file tree
Hide file tree
Showing 16 changed files with 136 additions and 79 deletions.
2 changes: 1 addition & 1 deletion backend/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ REDIS_PORT = 6379
REDIS_PASSWORD = "***********"

MEILI_SERVER_URL = "http://${SERVER}:7700"
MEILI_MASTER_KEY = "***********"
MEILI_MASTER_KEY = "qq80RvopBK1kjvdlSVG_8VaxsRZICP0uniq5F2v0nlM"

SENTRY_DSN = ""
TELEMETRY = False
4 changes: 3 additions & 1 deletion backend/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@ COPY uv.lock pyproject.toml /app/
# Install requirements
RUN uv sync

# Copy the application files
COPY main.py /app/
COPY routers /app/routers/
COPY worker /app/worker/
COPY static /app/static/

# Command to run the application
CMD ["uv", "run", "python", "main.py"]
CMD ["uv", "run", "python", "-m", "main"]
15 changes: 15 additions & 0 deletions backend/docker-compose.dev.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
services:
# Redis
cache:
container_name: cache
build:
Expand All @@ -7,6 +8,8 @@ services:
restart: always
ports:
- 6379:6379

# MongoDB
database:
container_name: database
build:
Expand All @@ -17,6 +20,8 @@ services:
restart: always
ports:
- 27017:27017

# Meilisearch
search:
container_name: search
build:
Expand All @@ -27,3 +32,13 @@ services:
restart: always
ports:
- 7700:7700

# Celery
worker:
container_name: worker
build:
context: ./
dockerfile: ./worker/Dockerfile
depends_on:
- cache
restart: always
73 changes: 48 additions & 25 deletions backend/docker-compose.prod.yaml
Original file line number Diff line number Diff line change
@@ -1,43 +1,65 @@
version: "3.4"

x-common-variables: &common-variables # General
APP_NAME: "backend"
ENVIRONMENT: "production"
ADMIN_PASSWORD: "***********"

# Server Config
WORKERS: 9
HOST: "0.0.0.0"
EXPOSE_PORT: 8000
FORWARDED_ALLOW_IPS: "*"

# Stock APIs
FINN_HUB_API_KEY: "***********"
ALPHA_VANTAGE_API_KEY: "***********"
OPEN_FIGI_API_KEY: "***********"

# Database APIs
MONGO_SERVER_URL: "database"
MONGO_BACKUP_URL: "1LT4xiFJkh6YlAPQDcov8YIKqcvevFlEE"
REDIS_SERVER_URL: "cache"
REDIS_PORT: 6379
MEILI_SERVER_URL: "search"
MEILI_MASTER_KEY: "***********"

# Telemetry
TELEMETRY: True
SENTRY_DSN: "***********"

services:
# FastAPI
backend:
container_name: backend
build:
dockerfile: Dockerfile
restart: always
depends_on:
- database
- cache
- search
- worker
volumes:
- ./public:/app/public
networks:
- proxy-network
environment:
APP_NAME: "backend"
ENVIRONMENT: "production"
ADMIN_PASSWORD: "***********"

WORKERS: 9
HOST: "0.0.0.0"
EXPOSE_PORT: 8000
FORWARDED_ALLOW_IPS: "*"

FINN_HUB_API_KEY: "***********"
ALPHA_VANTAGE_API_KEY: "***********"
OPEN_FIGI_API_KEY: "***********"

MONGO_SERVER_URL: "database"
MONGO_BACKUP_URL: "1LT4xiFJkh6YlAPQDcov8YIKqcvevFlEE"
REDIS_SERVER_URL: "cache"
REDIS_PORT: 6379
MEILI_SERVER_URL: "search"
MEILI_MASTER_KEY: "***********"
environment: *common-variables
restart: always

TELEMETRY: True
SENTRY_DSN: "***********"
# Celery
worker:
container_name: worker
build:
context: ./
dockerfile: ./worker/Dockerfile
depends_on:
- cache
networks:
- proxy-network
environment: *common-variables
restart: always

# Redis
cache:
container_name: cache
build:
Expand All @@ -47,6 +69,7 @@ services:
- proxy-network
restart: always

# MongoDB
database:
container_name: database
build:
Expand All @@ -58,6 +81,7 @@ services:
- ./database/main_db:/data/db
restart: always

# Meilisearch
search:
container_name: search
build:
Expand All @@ -67,9 +91,8 @@ services:
- ./search/search_db:/meili_data
networks:
- proxy-network
environment: *common-variables # Only MEILI_SERVER_URL and MEILI_MASTER_KEY are needed.
restart: always
environment:
MEILI_MASTER_KEY: "***********"

networks:
proxy-network:
Expand Down
7 changes: 4 additions & 3 deletions backend/routers/filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@
from urllib import parse
from datetime import datetime

from . import worker
from .worker import production_environment
from worker import tasks as worker

from .lib import web
from .lib import database
Expand All @@ -20,6 +19,8 @@
from .lib.api import sec_filer_search
from .lib.cache import cache

production_environment = getattr(worker, "production_environment", False)


class Filer(BaseModel):
cik: str
Expand Down Expand Up @@ -198,7 +199,7 @@ def create_historical(cik, company, stamp):


def create_filer(cik, sec_data):
company, stamp = web.initalize_filer(cik, sec_data)
company, stamp = web.initialize_filer(cik, sec_data)
create_recent(cik, company, stamp)
create_historical(cik, company, stamp)

Expand Down
4 changes: 3 additions & 1 deletion backend/routers/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@
import os
import logging

from worker.tasks import try_filer, replace_filer, delay_error, production_environment

from .lib import database
from .lib import cache as cm
from .lib.backup import save_collections

from .filer import popular_cik_list, top_cik_list
from .worker import try_filer, replace_filer, delay_error, production_environment

cache = cm.cache
router = APIRouter(
Expand All @@ -32,6 +33,7 @@ async def info():
async def info_undefined():
return {"message": "Hello World!"}


@cache(4)
@router.get("/health", status_code=200)
async def health():
Expand Down
10 changes: 3 additions & 7 deletions backend/routers/lib/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -789,12 +789,8 @@ def sort_and_format(filer_ciks):
"updated": 1,
"_id": 0,
}

for cik in filer_ciks:
filers = []
filer = database.find_filer(cik, project)
if filer:
filers.append(filer)
filers = [filer for filer in database.find_filers({"cik": {"$in": filer_ciks}}, project)]


try:
filers_sorted = [
Expand Down Expand Up @@ -825,7 +821,7 @@ def sort_and_format(filer_ciks):
)
filer.pop("_id", None)
except Exception as e:
errors.report_error(cik, e)
errors.report_error(filer.get("cik", "NA"), e)
filer["date"] = "NA"
filer["market_value"] = "NA"
return filers_sorted
Expand Down
22 changes: 8 additions & 14 deletions backend/routers/lib/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,14 @@
import requests
import logging

from dotenv import load_dotenv
from datetime import datetime

from . import database
from . import analysis

load_dotenv()

logging.info("[ APIs Initializing ] ...")

# Requests
Expand All @@ -16,19 +19,10 @@
"User-Agent": "wallstreetlocal admin@wallstreetlocal.com ",
}

ENVIRONMENT = os.environ.get("ENVIRONMENT", "development")
production_environment = True if ENVIRONMENT == "production" else False
if not production_environment:
from dotenv import load_dotenv

load_dotenv(".env.development")

# Environment Variables
FINN_HUB_API_KEY = os.environ["FINN_HUB_API_KEY"]
ALPHA_VANTAGE_API_KEY = os.environ["ALPHA_VANTAGE_API_KEY"]
OPEN_FIGI_API_KEY = os.environ["OPEN_FIGI_API_KEY"]

# pyright: reportUnboundVariable=false
# API Variables
FINN_HUB_API_KEY = os.environ.get("FINN_HUB_API_KEY", "")
ALPHA_VANTAGE_API_KEY = os.environ.get("ALPHA_VANTAGE_API_KEY", "")
OPEN_FIGI_API_KEY = os.environ.get("OPEN_FIGI_API_KEY", "")


def rate_limit(cik, wait=60):
Expand Down Expand Up @@ -140,7 +134,7 @@ def sec_filer_search(cik):
cik,
custom_wait=600,
)

if res.ok:
data = res.json()
else:
Expand Down
6 changes: 3 additions & 3 deletions backend/routers/lib/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@
ENVIRONMENT = os.environ.get("ENVIRONMENT", "development")
production_environment = True if ENVIRONMENT == "production" else False

REDIS_SERVER_URL = os.environ["REDIS_SERVER_URL"]
REDIS_PORT = int(os.environ.get("REDIS_PORT", 14640))
REDIS_SERVER_URL = os.environ.get("REDIS_SERVER_URL", "cache")
REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
REDIS_USERNAME = os.environ.get("REDIS_USERNAME", "default")
REDIS_PASSWORD = os.environ["REDIS_PASSWORD"]
REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD", "")

store = redis.Redis(
host=REDIS_SERVER_URL,
Expand Down
2 changes: 1 addition & 1 deletion backend/routers/lib/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

load_dotenv()

MONGO_SERVER_URL = os.environ["MONGO_SERVER_URL"]
MONGO_SERVER_URL = os.environ.get("MONGO_SERVER_URL", "mongodb://database:27017")
ENVIRONMENT = os.environ.get("ENVIRONMENT", "development")
production_environment = True if ENVIRONMENT == "production" else False

Expand Down
8 changes: 5 additions & 3 deletions backend/routers/lib/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,18 @@
ENVIRONMENT = os.environ.get("ENVIRONMENT", "development")
production_environment = True if ENVIRONMENT == "production" else False

MEILI_SERVER_URL = os.environ["MEILI_SERVER_URL"]
MEILI_MASTER_KEY = os.environ["MEILI_MASTER_KEY"]
MEILI_SERVER_URL = os.environ.get("MEILI_SERVER_URL", "http://search:7700")
MEILI_MASTER_KEY = os.environ.get(
"MEILI_MASTER_KEY", "qq80RvopBK1kjvdlSVG_8VaxsRZICP0uniq5F2v0nlM"
)


def _prepare_meilisearch():
client = meilisearch.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
companies_index = client.index("companies")
indexes = client.get_indexes()
if not indexes or "companies" not in [index.uid for index in indexes]:
client.create_index("companies", {"primaryKey": "cik"})
client.create_index("companies", "cik")
try:
companies_index.update(primary_key="cik")
companies_index.update_displayed_attributes(
Expand Down
2 changes: 1 addition & 1 deletion backend/routers/lib/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def process_filings(cik, data):
return filings, last_report, first_report


def initalize_filer(cik, sec_data):
def initialize_filer(cik, sec_data):
company = {
"name": sec_data["name"],
"cik": cik,
Expand Down
8 changes: 2 additions & 6 deletions backend/routers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@
from sentry_sdk.integrations.pymongo import PyMongoIntegration
from sentry_sdk.integrations.logging import LoggingIntegration

from .worker import queue
from worker import tasks as queue

from .lib import errors
from .lib import database
from .lib import search
Expand Down Expand Up @@ -313,9 +314,4 @@ def insert_search(document_list: list):
with open(f"{cwd}/static/statistics.json", "w") as s:
json.dump(statistic, s, indent=6)

print("Starting Worker ...")
if production_environment:
worker = threading.Thread(target=start_worker)
worker.start()

print("Done!")
12 changes: 6 additions & 6 deletions backend/static/statistics.json
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
{
"latest": {
"count": 783,
"total": 1062215.2685668468,
"average": 1356.5967670074672
"count": 784,
"total": 1062224.7630047798,
"average": 1354.8785242407905
},
"historical": {
"count": 815,
"total": 3688009.7609632015,
"average": 4525.165350875094
"count": 816,
"total": 3688239.704072237,
"average": 4519.901598127742
}
}
Loading

0 comments on commit 70c3945

Please sign in to comment.