Simple script to download and install datadb demo data #186

Workflow file for this run

name: End-to-End Tests

#on: [push, pull_request]
on: [push]
#on:
#  pull_request_target:
#    types: [labeled]

jobs:
  test:
    runs-on: ubuntu-latest
    environment: "GitHub Actions 1"
    env:
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_TYPE: ${{ secrets.ASSISTANTS_API_TYPE }}
      ASSISTANTS_ID: ${{ secrets.ASSISTANTS_ID }}
      ASSISTANTS_BASE_URL: ${{ secrets.ASSISTANTS_BASE_URL }}
      ASSISTANTS_MODEL: ${{ secrets.ASSISTANTS_MODEL }}
      ASSISTANTS_BOT_NAME: ${{ secrets.ASSISTANTS_BOT_NAME }}
      POSTGRES_DATA_HOST: ${{ secrets.POSTGRES_DATA_HOST }}
      POSTGRES_DATA_PORT: ${{ secrets.POSTGRES_DATA_PORT }}
      POSTGRES_DATA_DB: ${{ secrets.POSTGRES_DATA_DB }}
      POSTGRES_DATA_USER: ${{ secrets.POSTGRES_DATA_USER }}
      POSTGRES_DATA_PASSWORD: ${{ secrets.POSTGRES_DATA_PASSWORD }}
      DATA_DB_CONN_STRING: ${{ secrets.DATA_DB_CONN_STRING }}
      POSTGRES_RECIPE_HOST: ${{ secrets.POSTGRES_RECIPE_HOST }}
      POSTGRES_RECIPE_PORT: ${{ secrets.POSTGRES_RECIPE_PORT }}
      POSTGRES_RECIPE_DB: ${{ secrets.POSTGRES_RECIPE_DB }}
      POSTGRES_RECIPE_USER: ${{ secrets.POSTGRES_RECIPE_USER }}
      POSTGRES_RECIPE_PASSWORD: ${{ secrets.POSTGRES_RECIPE_PASSWORD }}
      RECIPE_DB_CONN_STRING: "postgresql://${{ secrets.POSTGRES_RECIPE_USER }}:${{ secrets.POSTGRES_RECIPE_PASSWORD }}@${{ secrets.POSTGRES_RECIPE_HOST }}:${{ secrets.POSTGRES_RECIPE_PORT }}/${{ secrets.POSTGRES_RECIPE_DB }}"
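      # Assembled inline from the individual POSTGRES_RECIPE_* secrets above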
      RECIPES_OPENAI_API_TYPE: ${{ secrets.RECIPES_OPENAI_API_TYPE }}
      RECIPES_OPENAI_API_KEY: ${{ secrets.RECIPES_OPENAI_API_KEY }}
      RECIPES_MODEL: ${{ secrets.RECIPES_MODEL }}
      RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME: ${{ secrets.RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME }}
      RECIPES_MEMORY_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_MEMORY_SIMILARITY_CUTOFF }}
      RECIPES_RECIPE_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_RECIPE_SIMILARITY_CUTOFF }}
      RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF }}
      RECIPES_MODEL_TEMP: ${{ secrets.RECIPES_MODEL_TEMP }}
      RECIPES_MODEL_MAX_TOKENS: ${{ secrets.RECIPES_MODEL_MAX_TOKENS }}
      IMAGE_HOST: ${{ secrets.IMAGE_HOST }}
      RECIPE_SERVER_API: ${{ secrets.RECIPE_SERVER_API }}
      CHAINLIT_AUTH_SECRET: ${{ secrets.CHAINLIT_AUTH_SECRET }}
      USER_LOGIN: ${{ secrets.USER_LOGIN }}
      USER_PASSWORD: ${{ secrets.USER_PASSWORD }}
      COLUMNS: 200
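      # COLUMNS gives CLI tools a wide virtual terminal so tabular output
      # (e.g. pf run show-details below) is not wrapped in the CI logs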
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      #- name: Checkout integration tests data
      #  uses: actions/checkout@master
      #  with:
      #    repository: datakind/recipes-ai-test-data
      #    ssh-key: ${{ secrets.GITHUB_SSH_PRIVATE_KEY }}
      #    path: recipes-ai-test-data
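      # Exposes ACTIONS_RUNTIME_TOKEN / ACTIONS_CACHE_URL to later steps, which
      # docker buildx's gha cache backend can use (see the buildx TODO below)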
      - name: Expose GitHub Runtime
        uses: crazy-max/ghaction-github-runtime@v2
      - name: Spin up DB and recipes server
        run: |
          env > .env
          echo "Installing demo data ..."
          pip3 install gdown==5.2.0
          cd data && python3 download_demo_data.py && cd ..
          ls data/datadb
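          # Sanity check: the listing above should show the downloaded demo data
          # that the datadb container is assumed to mount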
          mkdir -p ./flows/chainlit-ui-evaluation/recipes/public
          # TODO This should be enhanced to use a buildx bake to leverage layer caching for faster builds, or push images to a registry and simply pull them for the run
          # TODO The docker-compose files should be refactored to use scopes instead of a different version for each environment
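          # A possible sketch for the buildx TODO (an untested assumption, not the current setup):
          # bake can read the compose file directly and use the GitHub Actions cache
          # exposed by the "Expose GitHub Runtime" step, e.g.
          #   docker buildx bake -f ./docker-compose-github.yml \
          #     --set "*.cache-from=type=gha" --set "*.cache-to=type=gha,mode=max" --load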
echo "Starting docker containers for dbs and server ..."
docker-compose -f ./docker-compose-github.yml pull
docker-compose -f ./docker-compose-github.yml up -d --build
echo "logs datadb ..."
docker-compose -f docker-compose-github.yml logs datadb
docker ps
      # TODO The promptflow docker build wasn't working in GH Actions, so promptflow is deployed to the host for now
      - name: Set up promptflow and run tests
        uses: actions/setup-python@v4
        with:
          python-version: "3.11.4"
      - run: |
          echo "Installing promptflow packages ..."
          pip3 install promptflow==1.12.0
          pip3 install promptflow-tools==1.4.0
          pip3 install chainlit==1.1.305
          pip3 install langchain==0.2.1
          pip3 install langchain_openai==0.1.7
          pip3 install psycopg2_binary==2.9.9
          pip3 install keyrings.alt
          pip3 list
      - run: |
          echo "Setting up folders ..."
          cd flows/chainlit-ui-evaluation/
          cp ../../ui/chat-chainlit-assistant/app.py .
          cp -r ../../utils .
          cp -r ../../templates .
          cp ../../management/skills.py .
          echo "Setting up Promptflow connections ..."
          pf connection create --file ./openai.yaml --set api_key=$OPENAI_API_KEY --name open_ai_connection
          pf connection create --file ./azure_openai.yaml --set api_key=$OPENAI_API_KEY --set api_base=$OPENAI_API_ENDPOINT --name azure_openai
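          # NOTE: OPENAI_API_ENDPOINT is not set in the env block above; it is
          # assumed to be provided elsewhere in the runner environment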
          # Test running one node with default inputs. Good for debugging GH Actions
          #pf flow test --flow . --node call_assistant
          #python3 call_assistant.py --chat_history '[{"author": "user","content": "Hi!"}, {"author": "user","content": "What is the total population of Mali"}]'
          #python3 call_assistant.py --chat_history '[{"author": "user","content": "plot a line chart of fatalities by month for Chad using HDX data as an image"}]'
          # This runs a few cases with the script kill, as promptflow does, but prints all debug output. Good for testing.
          # python3 call_assistant_debug.py
          echo "Starting Promptflow batch run using data.jsonl ..."
          pf run create --flow . --data ./data.jsonl --stream --column-mapping query='${data.query}' context='${data.context}' chat_history='${data.chat_history}' --name base_run
      - run: |
          echo "Promptflow results ..."
          cd flows/chainlit-ui-evaluation/
          pf run show-details -n base_run
          echo "Getting metrics ..."
          pf run show-metrics -n base_run
          ##pf run visualize -n base_run
          echo "Checking results ..."
          python3 check_evaluation_results.py
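          # check_evaluation_results.py is assumed to exit non-zero when a check
          # fails, which in turn fails this job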