import os

import gradio as gr
from groq import Groq

from config import GROQ_API_KEY


class ConversationalAI:
    """Streaming chat assistant that generates Spanish content-marketing copy via the Groq API."""

    def __init__(self):
        # Expose the API key so the Groq client can pick it up from the environment.
        os.environ["GROQ_API_KEY"] = GROQ_API_KEY
        self.client = Groq()
        # System prompt: the model acts as a content-marketing consultant and always replies in American Spanish.
        self.system_prompt = {
            "role": "system",
            "content": (
                "I want you to act as a content marketing consultant. "
                "I will provide you with the name of a product or service, and you will generate "
                "content-marketing publications in Spanish with attractive emojis that motivate the reader "
                "to learn more about [product] through tips, guides, and useful suggestions. "
                "Use your knowledge of content marketing: the content must be inspiring and completely focused "
                "on bringing value to the reader, without direct or indirect advertising. "
                "Generate long content, at least 5 short relevant paragraphs. Check that previous content is not repeated. "
                "Keep paragraphs between 10 and 20 words. "
                "Use attractive emojis and titles such as: \"The 5 best tricks for [action]\", "
                "\"The ultimate beginner's guide to [topic]\", \"Want [result]? I show you how to achieve it in 5 steps\". "
                "Use practical tips such as: \"With these 5 tips you'll get [result]\", "
                "\"Five innovative ways to use [product] in your daily life\". "
                "Educational content: \"The most common mistakes and how to avoid them\", "
                "\"Myths and truths about [topic]\", \"The latest trends you need to know about\". "
                "Testimonials and examples that connect emotionally: \"Here's what I learned when I started using [product]\", "
                "\"Stories of real users who solved [problem]\". "
                "Generate content focused on solving doubts and adding value, NOT direct sales. "
                "Surprise me with your best ideas! "
                "IMPORTANT: Always answer in AMERICAN SPANISH."
            ),
        }

    async def chat_groq(self, message, history):
        # Rebuild the full conversation: system prompt, prior turns, then the new user message.
        messages = [self.system_prompt]
        for user_msg, assistant_msg in history:
            messages.append({"role": "user", "content": str(user_msg)})
            messages.append({"role": "assistant", "content": str(assistant_msg)})
        messages.append({"role": "user", "content": str(message)})

        # Stream the completion, yielding the accumulated text so the UI updates incrementally.
        response_content = ""
        stream = self.client.chat.completions.create(
            model="llama3-70b-8192",
            messages=messages,
            max_tokens=1024,
            temperature=1.3,
            stream=True,
        )
        for chunk in stream:
            content = chunk.choices[0].delta.content
            if content:
                response_content += content
                yield response_content

    def create_chat_interface(self):
        # Minimal Gradio UI: a chat interface with the clear/undo/retry buttons hidden.
        with gr.Blocks(theme=gr.themes.Monochrome(), fill_height=True) as demo:
            gr.ChatInterface(
                self.chat_groq,
                clear_btn=None,
                undo_btn=None,
                retry_btn=None,
            )
        return demo


if __name__ == "__main__":
    ai = ConversationalAI()
    demo = ai.create_chat_interface()
    demo.queue()
    demo.launch()
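
# Note: this script imports GROQ_API_KEY from a local config module that is not shown here.
# A minimal config.py sketch (an assumption; the real project may load the key differently,
# for example from a .env file) could simply read it from the environment:
#
#     import os
#     GROQ_API_KEY = os.environ.get("GROQ_API_KEY", "")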