feat: apply the generated manifests to cluster
feiskyer committed Jul 8, 2023
1 parent 76fb73c commit d8de482
Showing 13 changed files with 161 additions and 54 deletions.
30 changes: 30 additions & 0 deletions .github/workflows/release.yaml
@@ -0,0 +1,30 @@
name: Release

on:
push:
tags:
- "v*.*.*"

env:
REGISTRY: ghcr.io

jobs:
build-push-image:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
run: |
docker build -t ${{ env.REGISTRY }}/${{ github.repository_owner }}/kube-copilot:${{ github.ref_name }} .
docker push ${{ env.REGISTRY }}/${{ github.repository_owner }}/kube-copilot:${{ github.ref_name }}
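
Note that the workflow sets up QEMU and Buildx but the build step still calls plain docker build/docker push, which produces a single-architecture image. A hedged sketch of a multi-arch variant (not part of this commit; it reuses the tag layout from the step above):

  # Hypothetical multi-arch build step (sketch only, not in the commit).
  # QEMU and Buildx are already configured above, so one buildx invocation
  # can build and push linux/amd64 and linux/arm64 images together.
  docker buildx build \
    --platform linux/amd64,linux/arm64 \
    --tag ${{ env.REGISTRY }}/${{ github.repository_owner }}/kube-copilot:${{ github.ref_name }} \
    --push .
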
14 changes: 12 additions & 2 deletions Makefile
@@ -4,6 +4,10 @@
run:
poetry run kube-copilot $(ARGS)

.PHONY: web
web:
streamlit run web/Home.py

.PHONY: build
build:
poetry build
@@ -12,9 +16,16 @@ build:
install: build
pip install --force-reinstall --no-deps dist/$(shell ls -t dist | head -n 1)

.PHONY: versioning
versioning:
yq -i ".image.tag = \"v$(shell poetry version -s)\"" ./helm/kube-copilot/values.yaml
yq -i ".version = \"$(shell poetry version -s)\"" ./helm/kube-copilot/Chart.yaml
yq -i ".appVersion = \"$(shell poetry version -s)\"" ./helm/kube-copilot/Chart.yaml

.PHONY: publish
publish: build
poetry publish
gh release create v$(shell poetry version -s)

.PHONY: release-helm
release-helm:
@@ -32,8 +43,7 @@ release-helm:
git checkout main

.PHONY: release
release: publish release-helm
gh release create v$(shell poetry version -s)
release: versioning publish release-helm

.PHONY: clean
clean:
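
With the new versioning target wired into release, a typical flow would look roughly like this (a sketch assuming poetry, yq, and gh are installed locally, as the recipes above require):

  poetry version patch   # bump the package version, e.g. 0.1.18 -> 0.1.19
  make release           # versioning -> publish (poetry publish + gh release create) -> release-helm
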
3 changes: 2 additions & 1 deletion helm/README.md
@@ -55,6 +55,7 @@ helm install kube-copilot kube-copilot \
| resources | object | `{}` | |
| service.port | int | `80` | |
| service.type | string | `"ClusterIP"` | |
| serviceAccount.create | bool | `true` | |
| serviceAccount.create | bool | `true` | Create the service account |
| serviceAccount.admin | bool | `true` | Create the admin RBAC binding (readonly RBAC would be set if set to false) |
| serviceAccount.name | string | `"kube-copilot"` | |
| tolerations | list | `[]` | |
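
The new serviceAccount.admin value defaults to true (full cluster RBAC). A hedged example of installing with the read-only role instead, following the install command shown earlier in this README (release name and chart reference taken from that example; other values such as openai.apiKey still need to be supplied, shown here with a placeholder):

  helm install kube-copilot kube-copilot \
    --set serviceAccount.admin=false \
    --set openai.apiKey=<YOUR_OPENAI_KEY>
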
7 changes: 2 additions & 5 deletions helm/kube-copilot/Chart.yaml
@@ -1,7 +1,6 @@
apiVersion: v2
name: kube-copilot
description: Kubernetes Copilot powered by OpenAI

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
@@ -11,14 +10,12 @@ description: Kubernetes Copilot powered by OpenAI
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.18

version: 0.1.19
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "0.1.18"
appVersion: 0.1.19
13 changes: 13 additions & 0 deletions helm/kube-copilot/templates/serviceaccount.yaml
@@ -17,6 +17,18 @@ kind: ClusterRole
metadata:
name: {{ include "kube-copilot.serviceAccountName" . }}-reader
rules:
{{- if .Values.serviceAccount.admin -}}
- apiGroups:
- '*'
resources:
- '*'
verbs:
- '*'
- nonResourceURLs:
- '*'
verbs:
- '*'
{{- else }}
- apiGroups:
- '*'
resources:
@@ -29,6 +41,7 @@ rules:
verbs:
- 'get'
- 'list'
{{- end }}
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
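
To check which branch of the template gets rendered, one option is to render the chart locally (a sketch assuming the chart is used straight from the helm/kube-copilot directory of this repository):

  helm template kube-copilot ./helm/kube-copilot \
    --set serviceAccount.admin=false \
    --show-only templates/serviceaccount.yaml
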
53 changes: 22 additions & 31 deletions helm/kube-copilot/values.yaml
@@ -3,57 +3,51 @@
# Declare variables to be passed into your templates.

replicaCount: 1

image:
repository: ghcr.io/feiskyer/kube-copilot
pullPolicy: Always
tag: "latest"

tag: v0.1.19
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

openai:
apiModel: "gpt-4"
apiBase: ""
apiKey: ""

google:
apiKey: ""
cseId: ""

serviceAccount:
# Specifies whether a service account should be created
create: true
# Specifies whether to create admin role binding
admin: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: "kube-copilot"

podAnnotations: {}

podSecurityContext: {}
# fsGroup: 2000
# fsGroup: 2000

securityContext: {}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000

service:
type: ClusterIP
port: 80

ingress:
enabled: false
className: ""
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
@@ -63,21 +57,18 @@ ingress:
# - secretName: chart-example-tls
# hosts:
# - chart-example.local

resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi

nodeSelector: {}

tolerations: []

affinity: {}
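
Any of the values above can be overridden at install time; for example, pinning the image tag and OpenAI settings explicitly (a sketch using only keys from this values.yaml, with a placeholder API key):

  helm upgrade --install kube-copilot ./helm/kube-copilot \
    --set image.tag=v0.1.19 \
    --set openai.apiModel=gpt-4 \
    --set openai.apiKey=<YOUR_OPENAI_KEY>
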
41 changes: 41 additions & 0 deletions kube_copilot/labeler.py
@@ -0,0 +1,41 @@
from langchain.callbacks.streamlit.streamlit_callback_handler import ToolRecord, LLMThoughtLabeler


CHECKMARK_EMOJI = "✅"
THINKING_EMOJI = ":thinking_face:"
HISTORY_EMOJI = ":books:"
EXCEPTION_EMOJI = "⚠️"


class CustomLLMThoughtLabeler(LLMThoughtLabeler):
def get_tool_label(self, tool: ToolRecord, is_complete: bool) -> str:
"""Return the label for an LLMThought that has an associated
tool.
Parameters
----------
tool
The tool's ToolRecord
is_complete
True if the thought is complete; False if the thought
is still receiving input.
Returns
-------
The markdown label for the thought's container.
"""
input = tool.input_str.strip()
name = tool.name
emoji = CHECKMARK_EMOJI if is_complete else THINKING_EMOJI
if name == "_Exception":
emoji = EXCEPTION_EMOJI
name = "Parsing error"
# idx = min([60, len(input)])
# input = input[0:idx]
# if len(tool.input_str) > idx:
# input = input + "..."
# input = input.replace("\n", " ")
label = f"{emoji} **{name}:** {input}"
return label
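
The custom labeler only changes how each tool invocation is titled in the Streamlit thought container: a checkmark when complete, a thinking face while running, and a warning label for _Exception parse errors. The wiring is the same in every page below; a minimal sketch (the StreamlitCallbackHandler import path is assumed from the langchain version used in this project):

  import streamlit as st
  from langchain.callbacks import StreamlitCallbackHandler
  from kube_copilot.labeler import CustomLLMThoughtLabeler

  # Route agent thoughts into the current Streamlit container, using the
  # custom labels defined above for each tool call.
  st_cb = StreamlitCallbackHandler(st.container(), thought_labeler=CustomLLMThoughtLabeler())
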
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "kube-copilot"
version = "0.1.18"
version = "0.1.19"
description = "Kubernetes Copilot"
authors = ["Pengfei Ni <feiskyer@gmail.com>"]
readme = "README.md"
4 changes: 3 additions & 1 deletion web/Home.py
@@ -10,6 +10,7 @@
from kube_copilot.llm import init_openai
from kube_copilot.prompts import get_prompt
from kube_copilot.kubeconfig import setup_kubeconfig
from kube_copilot.labeler import CustomLLMThoughtLabeler

# setup logging
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
@@ -60,7 +61,8 @@
st.session_state.messages.append({"role": "user", "content": prompt})
st.chat_message("user").write(prompt)

st_cb = StreamlitCallbackHandler(st.container())
st_cb = StreamlitCallbackHandler(
st.container(), thought_labeler=CustomLLMThoughtLabeler())
chain = ReActLLM(model=model,
verbose=True,
enable_python=True,
3 changes: 2 additions & 1 deletion web/pages/Analyze.py
@@ -9,6 +9,7 @@
from kube_copilot.chains import ReActLLM
from kube_copilot.llm import init_openai
from kube_copilot.prompts import get_analyze_prompt
from kube_copilot.labeler import CustomLLMThoughtLabeler

logging.basicConfig(stream=sys.stdout, level=logging.CRITICAL)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
@@ -59,7 +60,7 @@
st.stop()

prompt = get_analyze_prompt(namespace, resource_type, resource_name)
st_cb = StreamlitCallbackHandler(st.container())
st_cb = StreamlitCallbackHandler(st.container(), thought_labeler=CustomLLMThoughtLabeler())
chain = ReActLLM(model=model,
verbose=True,
enable_python=True,
3 changes: 2 additions & 1 deletion web/pages/Audit.py
@@ -9,6 +9,7 @@
from kube_copilot.chains import ReActLLM
from kube_copilot.llm import init_openai
from kube_copilot.prompts import get_audit_prompt
from kube_copilot.labeler import CustomLLMThoughtLabeler

logging.basicConfig(stream=sys.stdout, level=logging.CRITICAL)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
@@ -56,7 +57,7 @@
st.stop()

prompt = get_audit_prompt(namespace, pod)
st_cb = StreamlitCallbackHandler(st.container())
st_cb = StreamlitCallbackHandler(st.container(), thought_labeler=CustomLLMThoughtLabeler())
chain = ReActLLM(model=model,
verbose=True,
enable_python=False,
3 changes: 2 additions & 1 deletion web/pages/Diagnose.py
@@ -9,6 +9,7 @@
from kube_copilot.chains import ReActLLM
from kube_copilot.llm import init_openai
from kube_copilot.prompts import get_diagnose_prompt
from kube_copilot.labeler import CustomLLMThoughtLabeler

logging.basicConfig(stream=sys.stdout, level=logging.CRITICAL)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
@@ -56,7 +57,7 @@
st.stop()

prompt = get_diagnose_prompt(namespace, pod)
st_cb = StreamlitCallbackHandler(st.container())
st_cb = StreamlitCallbackHandler(st.container(), thought_labeler=CustomLLMThoughtLabeler())
# chain = PlanAndExecuteLLM(model=model, enable_python=True)
chain = ReActLLM(model=model,
verbose=True,