Addyk24 committed on
Commit 7073cc2 · verified · 1 Parent(s): acb8ef2

Update app.py

Files changed (1)
  1. app.py +127 -7
app.py CHANGED
@@ -1,3 +1,12 @@
+ import os
+ import gradio as gr
+ from langchain.chat_models import ChatOpenAI
+ from langchain import LLMChain, PromptTemplate
+ from langchain.memory import ConversationBufferMemory
+ %pip install langchain_google_genai load_dotenv crewai crewai_tools langchain_community langchain sentence-transformers langchain-groq langchain_huggingface --quiet openai gradio huggingface_hub
+
+
+
  import os
  import gradio as gr
  from langchain.chat_models import ChatOpenAI
@@ -6,10 +15,124 @@ from langchain.memory import ConversationBufferMemory

  OPENAI_API_KEY=os.getenv('OPENAI_API_KEY')

- template = """You are a helpful assistant to answer all user queries.
- {chat_history}
- User: {user_message}
- Chatbot:"""
+ from crewai import Agent, Task, Crew
+
+ from google.colab import userdata
+ from dotenv import load_dotenv
+ from langchain_openai import ChatOpenAI
+ import os
+ load_dotenv()
+
+ from crewai import Agent, Task, Crew
+ from crewai import LLM
+ os.environ['GEMINI_API_KEY'] = userdata.get('Gemini_Api')
+ llm = LLM(model="gemini/gemini-1.5-flash")
+ # os.environ['GEMINI_API_KEY'] = userdata.get('GEMINI_API_KEY')
+ os.environ['GROQ_API_KEY']=userdata.get('GROQ_API')
+
+
+ import crewai_tools
+ from crewai import tools
+
+
+ from crewai_tools import SerperDevTool
+ tool = SerperDevTool()
+
+
+ scrapper_agent = Agent(
+     role="Senior Scrapper Representative",
+     goal="Be the most friendly and helpful "
+          "Scrapper representative in your team to scrape information inputted by user of query {query} ",
+     backstory=(
+         "You have scrapped many information inputted by user of query {query} and "
+         "you are good and perfect at it and makes this task easy "
+         "You need to make sure that you provide the best support!"
+         "Make sure to provide full complete answers, "
+         " and make no assumptions."
+     ),
+     allow_delegation=False,
+     llm=llm,
+     verbose=True
+ )
+ Provider_agent = Agent(
+     role="Senior information Provider Representative",
+     goal="Be the most friendly and helpful "
+          "information provider in your team to provide the information scrapped from web browser",
+     backstory=(
+         "You have provided many information that were scrapped by other agent from web browser and "
+         "you are good and perfect at it and makes this task easy "
+         "You need to make sure that you provide the best support!"
+         "Make sure to provide full complete answers, "
+         " and make no assumptions."
+     ),
+     allow_delegation=False,
+     llm=llm,
+     verbose=True
+ )
+
+ scrapper_task = Task(
+     description=(
+         "user just reached out with a super important task"
+         "to scrape information from web browser of query {query} "
+         "Make sure to use everything you know "
+         "to provide the best support possible."
+         "You must strive to provide a complete "
+         "and accurate response to the user's query."
+     ),
+     expected_output=(
+         "A detailed, informative response to the "
+         "user's query that addresses "
+         "all aspects of their question.\n"
+         "The response should include references "
+         "to everything you used to find the answer, "
+         "including external data or solutions. "
+         "Ensure the answer is complete, "
+         "leaving no questions unanswered, and maintain a helpful and friendly "
+         "tone throughout."
+     ),
+     tools=[tool],
+     agent=scrapper_agent,
+ )
+ Provider_task = Task(
+     description=(
+         "Your task is to make proper documented information that are scrapped from other agent "
+         "Make sure to use everything you know "
+         "to provide the best support possible."
+         "You must strive to provide a complete "
+         "and accurate response to the user's query."
+     ),
+     expected_output=(
+         "A detailed, informative response to the "
+         "user's query that addresses and make it well and perfect dcumented to easily readable "
+         "all aspects of their question.\n"
+         "The response should include references "
+         "to everything you used to find the answer, "
+         "including external data or solutions. "
+         "Ensure the answer is complete, "
+         "leaving no questions unanswered, and maintain a helpful and friendly "
+         "tone throughout."
+     ),
+     agent=Provider_agent,
+ )
+
+ crew = Crew(
+     agents=[scrapper_agent, Provider_agent],
+     tasks=[scrapper_task, Provider_task],
+     verbose=True
+ )
+
+ # inputs = {
+ # "query": input("Enter your query: "),
+ # # "url": input("Enter which source to use for query: ")
+ # }
+ # result = crew.kickoff(inputs=inputs)
+ def get_text_response(message, history):
+     result = crew.kickoff(inputs={"query": message})
+     return result.raw
+ # from IPython.display import Markdown
+ # Markdown(result.raw)
+
+ # demo = gr.ChatInterface(get_text_response, examples=["How are you doing?","What are your interests?","Which places do you like to visit?"])

  prompt = PromptTemplate(
      input_variables=["chat_history", "user_message"], template=template
@@ -24,9 +147,6 @@ llm_chain = LLMChain(
      memory=memory,
  )

- def get_text_response(user_message,history):
-     response = llm_chain.predict(user_message = user_message)
-     return response

  demo = gr.ChatInterface(get_text_response)

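Note on the added setup lines: "%pip install ..." is an IPython magic rather than Python syntax, and google.colab.userdata exists only inside Colab, so a plain Spaces app.py would normally pin the packages in requirements.txt and read the keys from environment variables instead. A minimal, hedged sketch of that variant follows; the variable and model names mirror the diff, and the SERPER_API_KEY expectation for SerperDevTool is an assumption.

# Hedged sketch, not part of the commit: plain-Python equivalent of the
# notebook-style setup in the diff. Assumes packages are listed in
# requirements.txt and API keys are provided as Space secrets / env vars.
import os
from crewai import LLM
from crewai_tools import SerperDevTool

llm = LLM(model="gemini/gemini-1.5-flash")              # picks up GEMINI_API_KEY from the environment, as the diff's os.environ assignment implies
os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API", "")  # mirrors userdata.get('GROQ_API') without Colab
tool = SerperDevTool()                                  # assumed to read SERPER_API_KEY from the environment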
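A minimal usage sketch built only from calls that already appear in the diff, plus Gradio's standard launch(); the sample query string is illustrative.

# Hedged usage sketch: run one query through the crew directly, then serve
# the same get_text_response function through the Gradio chat interface.
result = crew.kickoff(inputs={"query": "What is CrewAI?"})  # illustrative query
print(result.raw)

demo = gr.ChatInterface(get_text_response)
demo.launch()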