Commit b761ca7

no message

esteininger committed Mar 19, 2024
1 parent 7144672 commit b761ca7
Showing 7 changed files with 81 additions and 2,826 deletions.
2 changes: 1 addition & 1 deletion src/api/cloud_services/aws/serverless.py
@@ -64,7 +64,7 @@ def create_with_s3(self, runtime, function_name, s3_bucket, s3_key, tags={}):
                 Tags=tags,
                 Layers=self.layer_arns,
             )
-            return "response"
+            return response
 
         except Exception as e:
             raise HTTPException(status_code=400, detail=str(e))
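Note: the one-line fix above returns the actual boto3 response instead of the literal string "response". For context, a minimal sketch of the surrounding method, assuming self.client is a standard boto3 Lambda client; the handler string and attribute names here are illustrative, not taken from the repository:

    import boto3
    from fastapi import HTTPException

    class ServerlessFunctions:
        def __init__(self, role_arn, layer_arns):
            self.client = boto3.client("lambda")  # assumed client setup
            self.role_arn = role_arn
            self.layer_arns = layer_arns

        def create_with_s3(self, runtime, function_name, s3_bucket, s3_key, tags={}):
            try:
                response = self.client.create_function(
                    FunctionName=function_name,
                    Runtime=runtime,
                    Role=self.role_arn,          # assumed attribute name
                    Handler="main.handler",      # assumed entry point
                    Code={"S3Bucket": s3_bucket, "S3Key": s3_key},
                    Tags=tags,
                    Layers=self.layer_arns,
                )
                return response  # the fix: previously returned the string "response"
            except Exception as e:
                raise HTTPException(status_code=400, detail=str(e))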
6 changes: 0 additions & 6 deletions src/api/listeners/service.py
@@ -1,15 +1,9 @@
 from fastapi import HTTPException
 from db_internal.service import BaseAsyncDBService
 
 from utilities.internal_requests import AsyncHttpClient
 
 from config import listener_url
-from unstructured.partition.auto import partition
-
-from unstructured.partition.api import partition_via_api
-import requests
-import json
-from io import BytesIO
 
 
 class ListenerAsyncService(BaseAsyncDBService):
14 changes: 4 additions & 10 deletions src/api/parse/service.py
@@ -15,18 +15,17 @@ async def get_file_size(url):
     if size is not None:
         return int(size)
     else:
-        raise NotFoundError(status_code=400, detail="File not found")
+        raise NotFoundError({"error": "File not found"})
 
 
 class ParseHandler:
     def __init__(self, index_id):
         self.index_id = index_id
         self.url = f"{parser_url}/file"
-        print(self.url)
 
     async def send_to_parser(self, payload):
-        if await get_file_size(payload["file_url"]) > file_size_limit:
-            raise BadRequestError(status_code=400, detail="File size exceeds limit")
+        # if await get_file_size(payload["file_url"]) > file_size_limit:
+        #     raise BadRequestError({"error": "File too large"})
 
         payload["index_id"] = self.index_id
 
@@ -35,9 +34,4 @@ async def send_to_parser(self, payload):
             self.url,
             json=payload,
         )
-        if response.status_code == 200:
-            data = response.json()
-            return data
-        else:
-            print(response.json())
-            raise InternalServerError(status_code=400, detail="Error fetching file")
+        return response.json()
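Note: non-200 handling moves out of send_to_parser; callers now receive whatever JSON the parser returns. The get_file_size helper above presumably reads a Content-Length header; a minimal sketch under that assumption (aiohttp is used here for illustration only, the project's actual HTTP client is not shown in this hunk, and NotFoundError is the project's own exception class):

    import aiohttp

    async def get_file_size(url):
        # HEAD request; read Content-Length if the server provides it.
        async with aiohttp.ClientSession() as session:
            async with session.head(url, allow_redirects=True) as resp:
                size = resp.headers.get("Content-Length")
        if size is not None:
            return int(size)
        else:
            raise NotFoundError({"error": "File not found"})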
2,870 changes: 67 additions & 2,803 deletions src/api/poetry.lock

Large diffs are not rendered by default.

3 changes: 1 addition & 2 deletions src/api/pyproject.toml
@@ -1,5 +1,5 @@
 [tool.poetry]
-name = "connectors"
+name = "api"
 version = "0.1.0"
 description = ""
 authors = ["Ethan Steininger <esteininger21@gmail.com>"]
@@ -24,7 +24,6 @@ boto3 = "1.33.1"
 aioboto3 = "12.1.0"
 openai = "^1.14.1"
 psycopg2 = "^2.9.9"
-unstructured = {extras = ["all-docs"], version = "^0.12.6"}
 bandit = "^1.7.8"
 
 [build-system]
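Note: dropping unstructured here matches the import removals in src/api/listeners/service.py and accounts for most of the 2,803 deletions in poetry.lock. A change like this is typically made with Poetry's own tooling, which updates the lock file as a side effect (the exact command used is not recorded in the commit):

    poetry remove unstructured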
2 changes: 1 addition & 1 deletion src/api/utilities/zipper.py
@@ -33,7 +33,7 @@ def __init__(self, data):
         self.data = data
 
     def call_endpoint(self):
-        response = requests.post(f"{parser_url}/process/package", json=self.data)
+        response = requests.post(f"{parser_url}/package", json=self.data)
         return response.json()
 
     def load_zip_in_memory(self, s3_path):
10 changes: 7 additions & 3 deletions src/api/workflows/service.py
@@ -6,6 +6,8 @@
 from .model import WorkflowCreateRequest
 from .utilities import CodeHandler
 
+from _exceptions import InternalServerError
+
 
 from utilities.helpers import generate_function_name, current_time
 
@@ -38,12 +40,14 @@ def create(self, workflow_request):
             code_handler._validate_code()
 
             # upload to s3
-            s3_dict = code_handler._create_zip_package(
+            response = code_handler._create_zip_package(
                 new_workflow.settings.requirements,
                 new_workflow.settings.python_version,
-            )["data"]
+            )
+            if response["status"] == "error":
+                raise InternalServerError(response["error"])
 
-            # print(s3_dict)
+            s3_dict = response["response"]
 
             # create lambda function
             code_handler.create_lambda_function(s3_dict["bucket"], s3_dict["key"])
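Note: the new error handling implies _create_zip_package now returns an envelope dict with "status", "error", and "response" keys, where "response" carries the S3 location of the zip. A sketch of that inferred contract (key names taken from the diff; the build/upload helper is hypothetical):

    def _create_zip_package(self, requirements, python_version):
        try:
            # _build_and_upload_zip is a hypothetical helper standing in for the
            # real packaging/upload logic, which this diff does not show.
            bucket, key = self._build_and_upload_zip(requirements, python_version)
            return {"status": "success", "response": {"bucket": bucket, "key": key}}
        except Exception as e:
            return {"status": "error", "error": str(e)}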
