File size: 2,430 Bytes
0d96934
 
 
 
 
 
 
 
122e09d
0d96934
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
from langchain.llms import OpenAI
import os
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.prompts.chat import ChatPromptTemplate
from langchain.schema import BaseOutputParser, HumanMessage, SystemMessage, AIMessage


# Single shared chat model instance used by both helper functions in this module.
chat_llm = ChatOpenAI(model='gpt-3.5-turbo', temperature=0.5)

class OutputParser(BaseOutputParser):
    """Identity parser: hands the model's raw text back to the caller unchanged."""

    def parse(self, text: str):
        """Return the LLM output verbatim."""
        parsed = text
        return parsed


# System prompt used to seed the persistent conversation history
# (st.session_state['flowmessages']). NOTE(review): near-duplicate of the
# hard-coded template inside get_response, which additionally says
# "different idiom" — confirm whether the two were meant to diverge.
system_template = "You are an IELTS academic trainer. User is preparing for it's IELTS test. When user asks for an idiom, you have to give idiom with it's meaning and also give usecases with sentences."


# One-shot helper (no conversation memory): builds a fresh prompt each call.
def get_response(question):
    """Send *question* through a system+human prompt to the chat model.

    Builds a ChatPromptTemplate, pipes it through the shared chat model
    and the identity OutputParser, and returns the parsed response.
    Unlike get_chatresponse, no history is kept between calls.
    """
    system_msg = "You are an IELTS academic trainer. User is preparing for it's IELTS test. When user asks for an idiom, you have to give different idiom with it's meaning and also give usecases with sentences."
    prompt = ChatPromptTemplate.from_messages(
        [('system', system_msg), ('human', "{text}")]
    )

    # LCEL pipeline: prompt -> model -> parser.
    pipeline = prompt | chat_llm | OutputParser()

    return pipeline.invoke({"text": question})

# Seed the conversation history once per browser session; Streamlit reruns
# the whole script on every interaction, so guard against re-initialising.
if 'flowmessages' not in st.session_state:
    st.session_state['flowmessages'] = [SystemMessage(content=system_template)]

def get_chatresponse(question):
    """Ask the chat model *question*, preserving conversation context.

    Appends the question to the message history stored in
    st.session_state['flowmessages'], invokes the model with the full
    history, records the model's reply in the history, and returns the
    reply text.
    """
    # history is the same list object held in session state, so appends
    # below persist across Streamlit reruns.
    history = st.session_state['flowmessages']
    history.append(HumanMessage(content=question))
    reply = chat_llm(history)
    history.append(AIMessage(content=reply.content))
    return reply.content


# create streamlit app
st.set_page_config(page_title="Langchain Application")

st.header("Your Idiom Generator")

generate = st.button("Generate an Idiom")

if generate:
    # Only call the API when the button is actually pressed. The original
    # code called get_chatresponse on every Streamlit rerun (Streamlit
    # re-executes the whole script on each interaction), burning an API
    # call even when the user did nothing.
    try:
        response = get_chatresponse("Give me an idiom.")
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt / SystemExit
        # still propagate; any API/network failure falls back to a canned
        # idiom below.
        st.warning('OpenAI API is not working at the moment.', icon='⚠')
        response = '''
        I will still give you an idiom.

        "Running out of money"

        Meaning: This idiom describes the situation where someone exhausts their available funds or financial resources, having spent most or all of their money and having none left.

        Example sentences:

        "Owner of the app is running out of money right now. Come back after a decade. He is hoping to be a rich by then."
            '''

    st.subheader("Response: ")
    st.write(response)