Final App
We will use OpenAI's API to give our chatbot some intelligence.
Configure the OpenAI API Key
Ensure you have an active OpenAI subscription. Save your API key as an environment variable named `OPENAI_API_KEY`:
```bash
export OPENAI_API_KEY="your-api-key-here"
```
Using the API
We need to modify our event handler to send a request to the API.
# state.py
import os

from openai import AsyncOpenAI


async def answer(self):
    """Event handler: stream an OpenAI chat completion for the current question.

    Appends a new (question, answer) pair to ``chat_history``, clears the
    question input, then progressively fills in the answer as tokens stream
    back — yielding after each update so the frontend re-renders.
    """
    # Our chatbot has some brains now!
    client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
    session = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": self.question}],
        stop=None,
        temperature=0.7,
        stream=True,
    )

    # Add to the answer as the chatbot responds.
    answer = ""
    self.chat_history.append((self.question, answer))

    # Clear the question input.
    self.question = ""
    # Yield here to clear the frontend input before continuing.
    yield

    async for item in session:
        delta = item.choices[0].delta
        # FIX: the first streamed chunk usually carries only the role and has
        # content=None; breaking on None would end the loop before any text
        # arrived. Skip contentless chunks instead of stopping — the stream
        # ends naturally when the async iterator is exhausted.
        if not hasattr(delta, "content") or delta.content is None:
            continue
        answer += delta.content
        self.chat_history[-1] = (self.chat_history[-1][0], answer)
        yield
Finally, we have our chatbot!
Final Code
We wrote all our code in three files, which you can find below.
# chatapp.py
import reflex as rx

from chatapp import style
from chatapp.state import State


def qa(question: str, answer: str) -> rx.Component:
    """Render one question/answer exchange as two aligned message bubbles."""
    question_bubble = rx.box(
        rx.text(question, style=style.question_style),
        text_align="right",
    )
    answer_bubble = rx.box(
        rx.text(answer, style=style.answer_style),
        text_align="left",
    )
    return rx.box(question_bubble, answer_bubble, margin_y="1em")


def chat() -> rx.Component:
    """Render the full chat history as a column of Q&A exchanges."""
    return rx.box(
        rx.foreach(
            State.chat_history,
            lambda pair: qa(pair[0], pair[1]),
        )
    )


def action_bar() -> rx.Component:
    """Render the input row: a question field plus the submit button."""
    question_input = rx.input(
        value=State.question,
        placeholder="Ask a question",
        on_change=State.set_question,
        style=style.input_style,
    )
    ask_button = rx.button(
        "Ask",
        on_click=State.answer,
        style=style.button_style,
    )
    return rx.hstack(question_input, ask_button)


def index() -> rx.Component:
    """The app's single page: the chat log with the action bar beneath it."""
    return rx.center(
        rx.vstack(
            chat(),
            action_bar(),
            align="center",
        )
    )


app = rx.App()
app.add_page(index)
# state.py
import os

from openai import AsyncOpenAI
import reflex as rx


class State(rx.State):
    """App state: the pending question and the accumulated chat history."""

    # The current question being asked.
    question: str

    # Keep track of the chat history as a list of (question, answer) tuples.
    chat_history: list[tuple[str, str]]

    async def answer(self):
        """Event handler: stream an OpenAI chat completion for the question.

        Appends a new (question, answer) pair to ``chat_history``, clears
        the question input, then progressively fills in the answer as tokens
        stream back — yielding after each update so the frontend re-renders.
        """
        # Our chatbot has some brains now!
        client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
        session = await client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": self.question}],
            stop=None,
            temperature=0.7,
            stream=True,
        )

        # Add to the answer as the chatbot responds.
        answer = ""
        self.chat_history.append((self.question, answer))

        # Clear the question input.
        self.question = ""
        # Yield here to clear the frontend input before continuing.
        yield

        async for item in session:
            delta = item.choices[0].delta
            # FIX: the first streamed chunk usually carries only the role and
            # has content=None; breaking on None would end the loop before
            # any text arrived. Skip contentless chunks instead of stopping —
            # the stream ends when the async iterator is exhausted.
            if not hasattr(delta, "content") or delta.content is None:
                continue
            answer += delta.content
            self.chat_history[-1] = (self.chat_history[-1][0], answer)
            yield
# style.py
import reflex as rx

# Shared look for every chat bubble (questions and answers alike).
shadow = "rgba(0, 0, 0, 0.15) 0px 2px 8px"
chat_margin = "20%"
message_style = {
    "padding": "1em",
    "border_radius": "5px",
    "margin_y": "0.5em",
    "box_shadow": shadow,
    "max_width": "30em",
    "display": "inline-block",
}

# Questions hug the right edge; answers hug the left.
question_style = {
    **message_style,
    "margin_left": chat_margin,
    "background_color": rx.color("gray", 4),
}
answer_style = {
    **message_style,
    "margin_right": chat_margin,
    "background_color": rx.color("accent", 8),
}

# The action bar: the question input field and the Ask button.
input_style = {
    "border_width": "1px",
    "padding": "1em",
    "box_shadow": shadow,
    "width": "350px",
}
button_style = {
    "background_color": rx.color("accent", 10),
    "box_shadow": shadow,
}
Next Steps
Congratulations! You have built your first chatbot. From here, you can read through the rest of the documentation to learn about Reflex in more detail. The best way to learn is to build something, so try to build your own app using this as a starting point!
One More Thing
With our hosting service, you can deploy this app with a single command within minutes. Check out our Hosting Quick Start.