Hey all, I built an AI assistant and now I would like to connect it to my WordPress website. To be specific, when the user clicks the classic support widget on the homepage, I would like a chat interface to open so the user can start chatting with the AI assistant.
Does anyone know how to do it? Thanks in advance
This is the "main.py" code of my AI assistant, hosted on Replit:
from flask import Flask, render_template, request, jsonify
import os
import qdrant_client
from langchain_community.vectorstores import Qdrant
from langchain_openai.embeddings import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.chains import RetrievalQA
from langchain_openai.llms import OpenAI
import re
import logging
import random
from functions import preprocess_query, filter_bot_response
# Configure basic logging for the whole process.
logging.basicConfig(
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    level=logging.INFO,
)

app = Flask(__name__)

# Global flag to track the first message after a server restart.
# NOTE(review): never read or updated anywhere else in this file.
first_message_received = False

# Required configuration from the environment (Repl Secrets).
# os.environ[...] raises KeyError at startup when a secret is missing,
# which fails fast instead of producing a cryptic connection error later.
QDRANT_HOST = os.environ['QDRANT_HOST']
QDRANT_API_KEY = os.environ['QDRANT_API_KEY']
QDRANT_COLLECTION = os.environ['QDRANT_COLLECTION']
OPENAI_API_KEY = os.environ['OPENAI_API_KEY']

# Create the Qdrant client and (re)create the collection.
# Fix: use the constants read above instead of re-reading the environment
# with os.getenv(), which silently returns None when a variable is unset.
client = qdrant_client.QdrantClient(QDRANT_HOST, api_key=QDRANT_API_KEY)

# WARNING: recreate_collection drops any existing data in the collection on
# every server start; the corpus is re-ingested from text.txt further below.
collection_config = qdrant_client.http.models.VectorParams(
    size=1536,  # OpenAI embedding dimension
    distance=qdrant_client.http.models.Distance.COSINE,
)
client.recreate_collection(collection_name=QDRANT_COLLECTION,
                           vectors_config=collection_config)

# Vector store backed by the Qdrant collection.
embeddings = OpenAIEmbeddings()
vectorstore = Qdrant(client=client, collection_name=QDRANT_COLLECTION,
                     embeddings=embeddings)

# Retrieval-based QA chain ("stuff": all retrieved docs go into one prompt).
qa = RetrievalQA.from_chain_type(llm=OpenAI(), chain_type="stuff",
                                 retriever=vectorstore.as_retriever())
# Add documents to your vector database
def get_chunks(text):
    """Split *text* into non-empty chunks separated by blank lines.

    Each Q&A pair in text.txt is delimited by a double newline; empty
    fragments (e.g. produced by leading/trailing separators) are dropped.
    """
    return [chunk for chunk in text.split("\n\n") if chunk]
# Ingest the knowledge base: split text.txt into chunks and index them.
# Fix: open with an explicit encoding so decoding does not depend on the
# platform locale.
with open("text.txt", encoding="utf-8") as f:
    raw_text = f.read()
texts = get_chunks(raw_text)
vectorstore.add_texts(texts)
# NOTE(review): the original rebuilt the RetrievalQA chain here a second
# time; that was redundant — the retriever created earlier already points
# at this same (now populated) vector store, so the duplicate was removed.
# Flask routes
@app.route('/')
def index():
    """Serve the landing page that hosts the chat widget."""
    page = render_template('index.html')
    return page
# Keywords that mark a query or answer as on-topic.
# Fix: the original entry was ""Data" — a stray extra quote that makes the
# file a SyntaxError; the intended literal is "Data".
KEYWORDS = [
    "Data",
]


def contains_keyword(text):
    """Return True if any entry of KEYWORDS occurs in *text*, case-insensitively."""
    lowered = text.lower()
    return any(keyword.lower() in lowered for keyword in KEYWORDS)
# Example usage within the Flask route:
@app.route('/query', methods=['POST'])
def query():
    """Answer a chat message posted as JSON: {"query": "..."}.

    Returns JSON {"message": <bot reply>}. Off-topic exchanges — where
    neither the query nor the model's answer contains a keyword — are
    replaced with a randomly chosen preset message.
    """
    data = request.json or {}
    raw_query = data.get('query')
    if not raw_query:
        # Fix: the original did data['query'] and raised KeyError (HTTP 500)
        # when the field was missing; reply with a clean 400 instead.
        return jsonify({'message': 'No query provided.'}), 400
    query_text = preprocess_query(raw_query)
    # Lazy %-style args avoid string formatting when INFO is disabled.
    logging.info("Received query: %s", query_text)
    # Run the retrieval-QA chain; 'result' holds the model's answer text.
    response = qa.invoke(query_text)
    bot_response = response.get('result', '')
    # Replace known special responses (delegated to functions.py).
    bot_response = filter_bot_response(bot_response)
    # Keep the bot on-topic: with no keyword in either the query or the
    # answer, fall back to a random canned reply.
    if not contains_keyword(query_text) and not contains_keyword(bot_response):
        preset_messages = [
            "I'm trained",
            "Sorry",
            "Please",
        ]
        bot_response = random.choice(preset_messages)
    # Fix: log the final (filtered/replaced) reply the user actually sees,
    # not the raw chain-output dict as the original did.
    logging.info("Bot response: %s", bot_response)
    return jsonify({'message': bot_response})
if __name__ == '__main__':
    # Bind on all interfaces so Replit can expose the server externally.
    # NOTE(review): debug=True enables the Werkzeug interactive debugger —
    # do not leave it enabled on a publicly reachable deployment.
    app.run(debug=True, host='0.0.0.0', port=8080)