fixed dockerfile
leftmove authored Aug 13, 2024
1 parent 2357ab4 · commit eb51219
Showing 12 changed files with 104 additions and 699 deletions.
38 changes: 27 additions & 11 deletions backend/Dockerfile.prod
@@ -1,18 +1,34 @@
FROM python:3.9
FROM python:3.9-slim as base

WORKDIR /app
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV POETRY_VERSION 1.8.3
ENV POETRY_HOME "/opt/poetry"
ENV POETRY_NO_INTERACTION 1
ENV PATH "$POETRY_HOME/bin:$PATH"

COPY ./main.py /app/main.py
# Install system dependencies
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

COPY ./routers /app/routers
COPY ./static /app/static
# Install Poetry - respects $POETRY_VERSION & $POETRY_HOME
RUN curl -sSL https://install.python-poetry.org | python3 -

RUN pip install poetry
RUN poetry config virtualenvs.create false

COPY pyproject.toml poetry.lock ./
RUN poetry install --only main
# Copy only requirements to cache them in docker layer
WORKDIR /app
COPY poetry.lock pyproject.toml /app/

# Project initialization:
RUN poetry config virtualenvs.create false
RUN poetry install --no-interaction

EXPOSE 8000
COPY main.py /app/
COPY routers /app/routers/
COPY static /app/static/

CMD ["python", "main.py"]
# Command to run the application
CMD ["poetry", "run", "python", "main.py"]
2 changes: 1 addition & 1 deletion backend/deploy.sh
@@ -1,4 +1,4 @@
echo "Deploying Server..."
echo "Deploying to Server..."

git pull origin main

1 change: 0 additions & 1 deletion backend/install.sh
@@ -1,2 +1 @@
sudo docker system prune -f
sudo docker compose -f docker-compose.yaml up --build --force-recreate -d
20 changes: 19 additions & 1 deletion backend/poetry.lock

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion backend/pyproject.toml
@@ -38,6 +38,7 @@ urllib3 = "^2.2.1"
uvicorn = "^0.28.0"
celery = { extras = ["redis"], version = "^5.4.0" }
sentry-sdk = { extras = ["celery"], version = "^2.0.1" }
meilisearch = "^0.31.4"

[tool.poetry.scripts]
dev = "main"
@@ -46,5 +47,5 @@ dev = "main"
debugpy = "^1.8.1"

[build-system]
requires = ["poetry-core"]
requires = ["poetry-core>=1.9"]
build-backend = "poetry.core.masonry.api"
616 changes: 0 additions & 616 deletions backend/requirements.txt

This file was deleted.

9 changes: 5 additions & 4 deletions backend/routers/filer.py
@@ -472,7 +472,8 @@ async def record_csv(cik: str, headers: str = None):
header_hash = hash(headers_string)
file_name = f"wallstreetlocal-{cik}-{header_hash}.csv"
except Exception as e:
print(e)
report_error(cik, e)

raise HTTPException(
status_code=422, detail="Malformed headers, unable to process request."
)
@@ -601,7 +602,7 @@ async def record_filing_csv(cik: str, access_number: str, headers: str = None):
header_hash = hash(headers_string)
file_name = f"wallstreetlocal-{cik}{header_hash}.csv"
except Exception as e:
print(e)
report_error(cik, e)
raise HTTPException(
status_code=422, detail="Malformed headers, unable to process request."
)
@@ -631,7 +632,7 @@ async def top_ciks():
try:
filers_sorted = analysis.sort_and_format(top_cik_list)
except Exception as e:
print(e)
report_error("Top CIKs", e)
raise HTTPException(500, detail="Error fetching filers.")

return {"filers": filers_sorted}
@@ -648,7 +649,7 @@ async def popular_ciks():
try:
filers_sorted = analysis.sort_and_format(popular_cik_list)
except Exception as e:
print(e)
report_error("Popular CIKs", e)
raise HTTPException(500, detail="Error fetching filers.")

return {"filers": filers_sorted}
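
Throughout this commit, bare print(e) calls in the routers are replaced with report_error(...) so that failures carry context instead of disappearing into stdout. The helper itself is not shown in this diff; the sketch below is only an illustration of what such a reporter might look like, assuming it logs locally and forwards to Sentry (sentry-sdk is already a project dependency). The real function presumably lives in routers/lib/errors.py and may differ.

    import logging

    import sentry_sdk

    logger = logging.getLogger("backend")


    def report_error(identifier: str, error: Exception) -> None:
        # Hypothetical sketch: log the failure with its identifier (a CIK,
        # file name, or task label in the calls above), then forward the
        # exception to Sentry.
        logger.error("Error while processing %s", identifier, exc_info=error)
        sentry_sdk.capture_exception(error)
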
41 changes: 21 additions & 20 deletions backend/routers/lib/analysis.py
@@ -51,7 +51,7 @@ def time_format(seconds: int) -> str:
return "-"


def serialize_stock(local_stock, global_stock):
def serialize_global(local_stock, global_stock):
cusip = local_stock["cusip"]
update = global_stock["update"]
ticker = global_stock["ticker"] if update else "NA"
@@ -105,10 +105,11 @@ def serialize_stock(local_stock, global_stock):
)

name = local_stock["name"]
ticker_str = f"{ticker} (Sold)" if sold and update else ticker

shares_held = local_stock["shares_held"]
market_value = local_stock["market_value"]
shares_held_str = local_stock["shares_held_str"]
market_value_str = local_stock["market_value_str"]

portfolio_percentage = local_stock.get("portfolio_percent")
portfolio_percentage = (
portfolio_percentage * 100
@@ -129,8 +130,6 @@ def serialize_stock(local_stock, global_stock):
if update and buy_timeseries != "NA"
else "NA"
)
shares_held_str = f"{int(shares_held):,}"
market_value_str = f"${int(market_value):,}"
portfolio_percentage_str = (
"{:.2f}".format(round(portfolio_percentage, 4))
if portfolio_percentage != "NA"
@@ -156,7 +155,6 @@ def serialize_stock(local_stock, global_stock):
"name": name,
"cusip": cusip,
"ticker": ticker,
"ticker_str": ticker_str,
"sector": sector,
"industry": industry,
"class": rights,
@@ -196,16 +194,16 @@ def serialize_local(

name = local_stock["name"]
cusip = local_stock["cusip"]
ticker = global_stock["ticker"]
ticker_str = global_stock["ticker_str"]

sector = global_stock["sector"]
industry = global_stock["industry"]
rights = local_stock["class"]
update = global_stock["update"]
shares_held = global_stock["shares_held"]
shares_held_str = global_stock["shares_held_str"]
market_value = global_stock["market_value"]
market_value_str = global_stock["market_value_str"]

shares_held = local_stock["shares_held"]
market_value = local_stock["market_value"]
shares_held_str = f"{int(shares_held):,}"
market_value_str = f"${int(market_value):,}"

recent_price = global_stock["recent_price"]
recent_price_str = global_stock["recent_price_str"]
@@ -222,6 +220,9 @@
portfolio_percentage = ratios["portfolio_percent"]
ownership_percentage = ratios["ownership_percent"]

ticker = global_stock["ticker"]
ticker_str = f"{ticker} (Sold)" if sold else ticker

buy_price = prices["buy"]
buy_float = buy_price["time"]
buy_date = datetime.fromtimestamp(buy_float)
@@ -285,7 +286,7 @@ def serialize_local(
"price_str": recent_price_str,
"gain_percent": gain_percent,
"gain_str": gain_percent_str,
}
},
},
}

@@ -525,7 +526,7 @@ def analyze_stocks(cik, filings):
"sold": sold_stamp,
}

updated_stock = serialize_stock(filing_stock, found_stock)
updated_stock = serialize_global(filing_stock, found_stock)
log_stock = {
"name": name,
"message": "Created Stock",
@@ -677,15 +678,15 @@ def sort_pipeline(
cwd = os.getcwd()


def create_json(content, filename):
file_path = f"{cwd}/static/filers/{filename}"
def create_json(content, file_name):
file_path = f"{cwd}/static/filers/{file_name}"
try:
with open(file_path, "r") as f: # @IgnoreException
filer_json = json.load(f)
if (datetime.now().timestamp() - filer_json["updated"]) > 60 * 60 * 3:
raise ValueError
except Exception as e:
print(e)
errors.report_error(file_name, e)
with open(file_path, "w") as r:
json.dump(content, r, indent=6)

@@ -751,7 +752,7 @@ def create_csv(content, file_name, headers=None):
cache.set_key(file_path, "bababooey", expire_time)
raise ValueError
except Exception as e:
print(e)
errors.report_error(file_name, e)
stock_list = create_dataframe(content, headers)
with open(file_path, "w") as f:
writer = csv.writer(f)
@@ -815,7 +816,7 @@ def sort_and_format(filer_ciks):
)
for filer in filers_sorted:
try:
filer["date"] = datetime.utcfromtimestamp(filer["updated"]).strftime(
filer["date"] = datetime.fromtimestamp(filer["updated"]).strftime(
"%Y-%m-%d"
)
market_value = filer.get("market_value", 0)
@@ -824,7 +825,7 @@
)
filer.pop("_id", None)
except Exception as e:
print(e)
errors.report_error(cik, e)
filer["date"] = "NA"
filer["market_value"] = "NA"
return filers_sorted
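
One behavioral note on the sort_and_format() change above: datetime.utcfromtimestamp() interprets the epoch value as UTC, while datetime.fromtimestamp() uses the server's local timezone, and utcfromtimestamp() is deprecated as of Python 3.12. A minimal sketch of the timezone-explicit equivalent, using an arbitrary example timestamp:

    from datetime import datetime, timezone

    updated = 1723500000  # example epoch timestamp, not taken from the data

    # Local-time formatting, as in the new code above.
    local_date = datetime.fromtimestamp(updated).strftime("%Y-%m-%d")

    # Explicit-UTC formatting, matching the old utcfromtimestamp() output
    # without the deprecated call.
    utc_date = datetime.fromtimestamp(updated, tz=timezone.utc).strftime("%Y-%m-%d")

The two can differ by a calendar day near midnight UTC, depending on the host timezone.
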
49 changes: 20 additions & 29 deletions backend/routers/lib/search.py
@@ -1,7 +1,9 @@
import meilisearch_python_sdk
import meilisearch_python_sdk as meilisearch

import os

from . import errors

ENVIRONMENT = os.environ.get("ENVIRONMENT", "development")
production_environment = True if ENVIRONMENT == "production" else False

@@ -10,40 +12,29 @@


def _prepare_meilisearch():
client = meilisearch_python_sdk.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
client = meilisearch.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
companies_index = client.index("companies")
indexes = client.get_indexes()
if not indexes or "companies" not in [index.uid for index in indexes]:
client.create_index("companies", primary_key="cik")
companies_index.update(primary_key="cik")
companies_index.update_displayed_attributes(
[
"name",
"cik",
"tickers",
]
)
companies_index.update_searchable_attributes(["name", "tickers", "cik"])
companies_index.update_filterable_attributes(["thirteen_f"])
client.create_index("companies", "cik")
try:
companies_index.update(primary_key="cik")
companies_index.update_displayed_attributes(
[
"name",
"cik",
"tickers",
]
)
companies_index.update_searchable_attributes(["name", "tickers", "cik"])
companies_index.update_filterable_attributes(["thirteen_f"])
except Exception as e:
errors.report_error(e)


_prepare_meilisearch()

try:
retries = 3
while retries:
search = meilisearch_python_sdk.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
search.create_index("companies", primary_key="cik")
companies_index = search.index("companies")
companies_index.add_documents([{"cik": "TEST"}])
retries -= 1
raise RuntimeError # @IgnoreException
except RuntimeError:
search = meilisearch_python_sdk.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
companies_index = search.index("companies")

# search = meilisearch_python_sdk.AsyncClient(MEILI_SERVER_URL, MEILI_MASTER_KEY)
# companies_index = search.index("companies")
search = meilisearch.Client(MEILI_SERVER_URL, MEILI_MASTER_KEY)
companies_index = search.index("companies")


def ping():
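
For context, a rough sketch of how the client and index configured above might be queried — the URL, key, query string, and filter value are placeholders, and the exact search() signature should be checked against the meilisearch-python-sdk version pinned in pyproject.toml:

    import meilisearch_python_sdk as meilisearch

    # Placeholder connection details; in the module above these come from
    # MEILI_SERVER_URL and MEILI_MASTER_KEY.
    client = meilisearch.Client("http://localhost:7700", "masterKey")
    companies = client.index("companies")

    # Search across the searchable attributes set in _prepare_meilisearch()
    # (name, tickers, cik), filtering on the "thirteen_f" attribute.
    # The filter expression assumes thirteen_f holds a boolean.
    results = companies.search("berkshire", filter="thirteen_f = true")
    for hit in results.hits:
        print(hit["cik"], hit["name"])
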
2 changes: 1 addition & 1 deletion backend/routers/stocks.py
@@ -55,7 +55,7 @@ async def stock_info(
)
cursor = database.search_filers(pipeline)
except Exception as e:
print(e)
report_error(cik, e)
cursor = []
count = 0

10 changes: 2 additions & 8 deletions backend/routers/utils.py
@@ -1,18 +1,12 @@
import logging
import pymongo.errors
import requests
import json
import os
import threading

from tqdm import tqdm
from dotenv import load_dotenv
import time
import functools

import redis
import meilisearch_python_sdk
import pymongo
import uvicorn

import sentry_sdk
@@ -252,7 +246,7 @@ def insert_search(document_list):
with open(top_ciks_path, "w") as f:
json.dump(data, f)
except Exception as e:
print(e)
errors.report_error("Gist Loading", e)
try:
r = requests.get(
"https://gist.githubusercontent.com/leftmove/daca5d470c869e9d6f14c298af809f9f/raw/wallstreetlocal-popular-filers.json"
Expand All @@ -263,7 +257,7 @@ def insert_search(document_list):
with open(popular_ciks_path, "w") as f:
json.dump(data, f)
except Exception as e:
print(e)
errors.report_error("Gist Loading", e)

print("Calculating Statistics ...")
create_latest = database.find_statistics(
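
The gist-loading block above follows a simple fetch-and-cache pattern: download a JSON gist, write it to disk, and report (rather than raise) on failure. A standalone sketch of that pattern, with logging substituted for the project's errors helper and a request timeout added as an assumption:

    import json
    import logging

    import requests

    logger = logging.getLogger("backend")


    def load_gist(url: str, path: str) -> None:
        # Fetch a JSON gist and cache it to disk; report failures instead
        # of raising, mirroring the try/except structure in insert_search().
        try:
            r = requests.get(url, timeout=10)
            r.raise_for_status()
            with open(path, "w") as f:
                json.dump(r.json(), f)
        except Exception as e:
            logger.error("Gist loading failed for %s", url, exc_info=e)
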