Merge pull request #215 from nulib/deploy/staging
Deploy to production
mbklein authored Jun 11, 2024
2 parents 6eda227 + a5a90bc commit 5833be5
Showing 7 changed files with 55 additions and 53 deletions.
1 change: 1 addition & 0 deletions chat/dependencies/requirements.txt
@@ -1,4 +1,5 @@
 boto3~=1.34.13
+honeybadger
 langchain
 langchain-community
 openai~=0.27.8
68 changes: 32 additions & 36 deletions chat/src/handlers/chat.py
@@ -1,11 +1,14 @@
 import boto3
 import json
+import logging
 import os
-import sys
-import traceback
 from datetime import datetime
 from event_config import EventConfig
 from helpers.response import prepare_response
+from honeybadger import honeybadger
+
+honeybadger.configure()
+logging.getLogger('honeybadger').addHandler(logging.StreamHandler())
 
 RESPONSE_TYPES = {
     "base": ["answer", "ref"],
@@ -14,43 +17,36 @@
 }
 
 def handler(event, context):
-    try:
-        config = EventConfig(event)
-        socket = event.get('socket', None)
-        config.setup_websocket(socket)
+    config = EventConfig(event)
+    socket = event.get('socket', None)
+    config.setup_websocket(socket)
 
-        if not config.is_logged_in:
-            config.socket.send({"type": "error", "message": "Unauthorized"})
-            return {"statusCode": 401, "body": "Unauthorized"}
-        debug_message = config.debug_message()
-        if config.debug_mode:
-            config.socket.send(debug_message)
+    if not config.is_logged_in:
+        config.socket.send({"type": "error", "message": "Unauthorized"})
+        return {"statusCode": 401, "body": "Unauthorized"}
+
+    debug_message = config.debug_message()
+    if config.debug_mode:
+        config.socket.send(debug_message)
 
-        if not os.getenv("SKIP_WEAVIATE_SETUP"):
-            config.setup_llm_request()
-        final_response = prepare_response(config)
-        config.socket.send(reshape_response(final_response, 'debug' if config.debug_mode else 'base'))
+    if not os.getenv("SKIP_WEAVIATE_SETUP"):
+        config.setup_llm_request()
+    final_response = prepare_response(config)
+    config.socket.send(reshape_response(final_response, 'debug' if config.debug_mode else 'base'))
 
-        log_group = os.getenv('METRICS_LOG_GROUP')
-        log_stream = context.log_stream_name
-        if log_group and ensure_log_stream_exists(log_group, log_stream):
-            log_client = boto3.client('logs')
-            log_message = reshape_response(final_response, 'log')
-            log_events = [
-                {
-                    'timestamp': timestamp(),
-                    'message': json.dumps(log_message)
-                }
-            ]
-            log_client.put_log_events(logGroupName=log_group, logStreamName=log_stream, logEvents=log_events)
-        return {"statusCode": 200}
-
-    except Exception:
-        exc_info = sys.exc_info()
-        err_text = ''.join(traceback.format_exception(*exc_info))
-        print(err_text)
-        return {"statusCode": 500, "body": f'Unhandled error:\n{err_text}'}
+    log_group = os.getenv('METRICS_LOG_GROUP')
+    log_stream = context.log_stream_name
+    if log_group and ensure_log_stream_exists(log_group, log_stream):
+        log_client = boto3.client('logs')
+        log_message = reshape_response(final_response, 'log')
+        log_events = [
+            {
+                'timestamp': timestamp(),
+                'message': json.dumps(log_message)
+            }
+        ]
+        log_client.put_log_events(logGroupName=log_group, logStreamName=log_stream, logEvents=log_events)
+    return {"statusCode": 200}
 
 def reshape_response(response, type):
     return {k: response[k] for k in RESPONSE_TYPES[type]}
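The handler no longer catches its own exceptions or returns a 500 body; error reporting now rests on the Honeybadger client configured at module load. As a minimal sketch of that pattern (not the handler above), assuming honeybadger.configure() with no arguments picks up the HONEYBADGER_* variables added to the Lambda environment later in this diff, and that failures should still propagate to the Lambda runtime:

import logging

from honeybadger import honeybadger

honeybadger.configure()  # assumed to read HONEYBADGER_API_KEY etc. from the environment
logging.getLogger('honeybadger').addHandler(logging.StreamHandler())


def do_work():
    # Stand-in for a real failure inside the handler body.
    raise RuntimeError("something went wrong")


def lambda_handler(event, context):  # hypothetical handler, not chat.py's handler()
    try:
        do_work()
    except RuntimeError as exc:
        honeybadger.notify(exc)  # report the exception to Honeybadger
        raise                    # re-raise so the invocation still fails
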
1 change: 1 addition & 0 deletions chat/src/requirements.txt
@@ -1,5 +1,6 @@
 # Runtime Dependencies
 boto3~=1.34.13
+honeybadger
 langchain
 langchain-community
 openai~=0.27.8
15 changes: 15 additions & 0 deletions chat/template.yaml
@@ -18,6 +18,18 @@ Parameters:
     Type: String
     Description: Prefix for Index names
     Default: ""
+  HoneybadgerApiKey:
+    Type: String
+    Description: Honeybadger API Key
+    Default: ""
+  HoneybadgerEnv:
+    Type: String
+    Description: Honeybadger Environment
+    Default: ""
+  HoneybadgerRevision:
+    Type: String
+    Description: Honeybadger Revision
+    Default: ""
   OpenSearchEndpoint:
     Type: String
     Description: OpenSearch Endpoint
@@ -203,6 +215,9 @@ Resources:
           AZURE_OPENAI_LLM_DEPLOYMENT_ID: !Ref AzureOpenaiLlmDeploymentId
           AZURE_OPENAI_RESOURCE_NAME: !Ref AzureOpenaiResourceName
           ENV_PREFIX: !Ref EnvironmentPrefix
+          HONEYBADGER_API_KEY: !Ref HoneybadgerApiKey
+          HONEYBADGER_ENVIRONMENT: !Ref HoneybadgerEnv
+          HONEYBADGER_REVISION: !Ref HoneybadgerRevision
           METRICS_LOG_GROUP: !Ref ChatMetricsLog
           OPENSEARCH_ENDPOINT: !Ref OpenSearchEndpoint
           OPENSEARCH_MODEL_ID: !Ref OpenSearchModelId
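The three new parameters surface in the Lambda environment as HONEYBADGER_API_KEY, HONEYBADGER_ENVIRONMENT, and HONEYBADGER_REVISION. A small sketch of the runtime side of that wiring (the defaults here are assumptions, not values from the repo):

import os

# Read the settings the SAM template injects; configure() in chat.py is
# assumed to find the same variables on its own.
honeybadger_settings = {
    "api_key": os.getenv("HONEYBADGER_API_KEY", ""),
    "environment": os.getenv("HONEYBADGER_ENVIRONMENT", "dev"),
    "revision": os.getenv("HONEYBADGER_REVISION", ""),
}

# Log everything except the API key itself.
print({k: v for k, v in honeybadger_settings.items() if k != "api_key"})
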
14 changes: 0 additions & 14 deletions chat/test/handlers/test_chat.py
@@ -1,7 +1,5 @@
 # ruff: noqa: E402
 
-import contextlib
-from io import StringIO
 import json
 import os
 import sys
@@ -71,15 +69,3 @@ def test_handler_debug_mode_for_superusers_only(self, mock_is_debug_enabled, moc
         handler(event, MockContext())
         response = json.loads(mock_client.received_data)
         self.assertEqual(response["type"], "error")
-
-    @patch.object(EventConfig, 'setup_websocket')
-    def test_error_handling(self, mock_event):
-        mock_event.side_effect = Exception("Some error occurred")
-        capture = StringIO()
-        with contextlib.redirect_stdout(capture):
-            response = handler({}, {})
-        self.assertEqual(response['statusCode'], 500)
-        self.assertIn('Unhandled error:', response['body'])
-        self.assertIn('Exception: Some error occurred', response['body'])
-        self.assertIn('Exception: Some error occurred', capture.getvalue())
-
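The deleted test covered the old catch-all that turned any exception into a 500 response. A hypothetical replacement (not part of this commit), written against names this test module already imports (handler, EventConfig, MockContext, patch) and assuming the surrounding unittest.TestCase, would assert that the error now propagates out of handler() instead:

    @patch.object(EventConfig, 'setup_websocket')
    def test_unhandled_errors_propagate(self, mock_setup_websocket):
        # With the try/except removed, a setup failure raises out of handler()
        # rather than returning a {"statusCode": 500} body.
        mock_setup_websocket.side_effect = Exception("Some error occurred")
        with self.assertRaises(Exception):
            handler({}, MockContext())
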
6 changes: 3 additions & 3 deletions node/src/handlers/get-file-set-download.js
@@ -112,9 +112,9 @@ async function getDownloadLink(doc) {
   const getObjectParams = {
     Bucket: bucket,
     Key: key,
-    ResponseContentDisposition: `attachment; filename=${
-      doc._source.label
-    }.${mime.extension(doc._source.mime_type)}`,
+    ResponseContentDisposition: `attachment; filename=download.${mime.extension(
+      doc._source.mime_type
+    )}`,
   };
 
   const client = new S3Client(clientParams);
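The presigned download URL now advertises a fixed filename, download.<extension>, rather than interpolating the work label, presumably because labels can contain characters that do not survive in a Content-Disposition header. The same idea sketched in Python with boto3 (bucket, key, and expiry are placeholders, not values from the repo):

import boto3

s3 = boto3.client("s3")

# ResponseContentDisposition makes S3 return a Content-Disposition header on
# the presigned GET, so the browser saves the object as "download.tif".
url = s3.generate_presigned_url(
    "get_object",
    Params={
        "Bucket": "example-derivatives-bucket",
        "Key": "path/to/file-set.tif",
        "ResponseContentDisposition": "attachment; filename=download.tif",
    },
    ExpiresIn=3600,
)
print(url)
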
3 changes: 3 additions & 0 deletions template.yaml
@@ -669,6 +669,9 @@ Resources:
        AzureOpenaiLlmDeploymentId: !Ref AzureOpenaiLlmDeploymentId
        AzureOpenaiResourceName: !Ref AzureOpenaiResourceName
        EnvironmentPrefix: !Ref EnvironmentPrefix
+       HoneybadgerApiKey: !Ref HoneybadgerApiKey
+       HoneybadgerEnv: !Ref HoneybadgerEnv
+       HoneybadgerRevision: !Ref HoneybadgerRevision
        OpenSearchEndpoint: !Ref OpenSearchEndpoint
        OpenSearchModelId: !Ref OpenSearchModelId
  chatWebsocketEndpoint:
