diff --git a/README.md b/README.md
index 93deeb6..92551a5 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,8 @@
## Overview
+[![Build and deploy Node.js app to Azure Web App - maht](https://github.com/MyLife-Services/mylife-maht/actions/workflows/azure-deploy-prod_maht.yml/badge.svg?branch=azure-deploy-prod)](https://github.com/MyLife-Services/mylife-maht/actions/workflows/azure-deploy-prod_maht.yml)
+
### MyLife: Preserving Member Stories for Posterity
MyLife is a groundbreaking initiative aimed at capturing and preserving the essence of human experiences for future generations. We believe in the power of personal stories, memories, and media to shape our understanding of the human condition. Our platform provides a unique, enduring, and internet-based solution for individuals to record and showcase their life stories.
diff --git a/inc/js/mylife-avatar.mjs b/inc/js/mylife-avatar.mjs
index 21570c8..96d1a16 100644
--- a/inc/js/mylife-avatar.mjs
+++ b/inc/js/mylife-avatar.mjs
@@ -321,10 +321,7 @@ class Avatar extends EventEmitter {
/**
* Returns all conversations of a specific-type stored in memory.
* @param {string} type - Type of conversation: chat, experience, dialog, inter-system, etc.; defaults to `chat`.
- * @returns {Conversation[]} - The array of conversation objects.
- */
- getConversations(type='chat'){
- return this.#conversations
+ * @returns {Conversation[]} - The array of conversation objects.
+ */
+ getConversations(type='chat'){
+ return this.#conversations
.filter(_=>_?.type===type)
}
/**
@@ -1995,18 +1992,22 @@ function mPruneMessage(bot, message, type='chat', processStartTime=Date.now()){
/* parse message */
const { bot_id: activeBotAIId, id: activeBotId, } = bot
let agent='server',
+ content='',
purpose=type,
response_time=Date.now()-processStartTime
const { content: messageContent, thread_id, } = message
- const content = Array.isArray(messageContent)
+ const rSource = /【.*?\】/gs // matches OpenAI citation annotations, e.g. 【4:0†source】
+ const rLines = /\n{2,}/g // collapses runs of two or more newlines into one
+ content = Array.isArray(messageContent)
? messageContent.reduce((acc, item) => {
- if (item?.type==='text' && item?.text?.value) {
+ if (item?.type==='text' && item?.text?.value){
acc += item.text.value + '\n'
}
return acc
}, '')
- .replace(/\n{2,}/g, '\n')
: messageContent
+ content = content.replace(rLines, '\n')
+ .replace(rSource, '') // This line removes OpenAI LLM "source" references
message = new Marked().parse(content)
const messageResponse = {
activeBotId,
diff --git a/views/assets/png/favicon.png b/views/assets/png/favicon.png
new file mode 100644
index 0000000..15e3b10
Binary files /dev/null and b/views/assets/png/favicon.png differ
diff --git a/views/layout.html b/views/layout.html
index 4479dfa..e7e46d9 100644
--- a/views/layout.html
+++ b/views/layout.html
@@ -8,6 +8,7 @@
+