Advanced usage
Custom Response
PandasAI offers the flexibility to handle chat responses in a customized manner. By default, PandasAI includes a ResponseParser class that can be extended to modify the response output according to your needs.
You can provide a custom parser, such as the built-in StreamlitResponse, to the configuration object, as the examples below show.
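Conceptually, a custom parser subclasses ResponseParser and overrides the formatter hook for the output type you want to change. The sketch below overrides format_plot so that the chart path is returned instead of being displayed; the hook name and the {"type": ..., "value": ...} shape of result mirror the format_dataframe example further down and should be verified against your installed PandasAI version.

from pandasai.responses.response_parser import ResponseParser

class PlotPathResponse(ResponseParser):
    """Illustrative parser: return the saved chart path instead of rendering it."""

    def __init__(self, context) -> None:
        super().__init__(context)

    def format_plot(self, result):
        # result is assumed to be a dict such as {"type": "plot", "value": "<path to chart>"}
        return result["value"]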
Example Usage
import os
import pandas as pd
from pandasai import SmartDatalake
from pandasai.responses.response_parser import ResponseParser
# This class overrides the default behaviour of how the dataframe is returned.
# By default, PandasAI returns a SmartDataframe.
class PandasDataFrame(ResponseParser):

    def __init__(self, context) -> None:
        super().__init__(context)

    def format_dataframe(self, result):
        # Return a pandas DataFrame instead of a SmartDataframe
        return result["value"]
employees_df = pd.DataFrame(
    {
        "EmployeeID": [1, 2, 3, 4, 5],
        "Name": ["John", "Emma", "Liam", "Olivia", "William"],
        "Department": ["HR", "Sales", "IT", "Marketing", "Finance"],
    }
)

salaries_df = pd.DataFrame(
    {
        "EmployeeID": [1, 2, 3, 4, 5],
        "Salary": [5000, 6000, 4500, 7000, 5500],
    }
)
# By default, unless you choose a different LLM, PandasAI uses BambooLLM.
# You can get your free API key by signing up at https://pandabi.ai (you can also configure it in your .env file)
os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY"

agent = SmartDatalake(
    [employees_df, salaries_df],
    config={"verbose": True, "response_parser": PandasDataFrame},
)

# Returns the response as a pandas DataFrame
response = agent.chat("Return a dataframe of name against salaries")
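Because format_dataframe returns result["value"], the chat call above should hand back a plain pandas DataFrame rather than a SmartDataframe, so ordinary pandas operations apply. A short, illustrative continuation (the file name is made up):

print(type(response))   # expected: <class 'pandas.core.frame.DataFrame'>
print(response.head())
response.to_csv("salaries_by_name.csv", index=False)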
Streamlit Example
import os
import pandas as pd
from pandasai import SmartDatalake
from pandasai.responses.streamlit_response import StreamlitResponse
employees_df = pd.DataFrame(
    {
        "EmployeeID": [1, 2, 3, 4, 5],
        "Name": ["John", "Emma", "Liam", "Olivia", "William"],
        "Department": ["HR", "Sales", "IT", "Marketing", "Finance"],
    }
)

salaries_df = pd.DataFrame(
    {
        "EmployeeID": [1, 2, 3, 4, 5],
        "Salary": [5000, 6000, 4500, 7000, 5500],
    }
)
# By default, unless you choose a different LLM, PandasAI uses BambooLLM.
# You can get your free API key by signing up at https://pandabi.ai (you can also configure it in your .env file)
os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY"

agent = SmartDatalake(
    [employees_df, salaries_df],
    config={"verbose": True, "response_parser": StreamlitResponse},
)

# The chart output is handled by the configured StreamlitResponse parser
agent.chat("Plot salaries against name")
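To see StreamlitResponse in action, the agent call has to run inside a script launched with streamlit run, so the parser can render the output in the page. A minimal, hypothetical wiring that reuses the agent configured above (the app title, prompt, and widget layout are illustrative and not part of PandasAI):

# app.py, launched with: streamlit run app.py
import streamlit as st

# ... build employees_df, salaries_df and the SmartDatalake agent exactly as above ...

st.title("Salary explorer")
prompt = st.text_input("Ask a question about the data", value="Plot salaries against name")
if st.button("Ask"):
    # With StreamlitResponse configured, the answer (e.g. the generated chart)
    # is expected to be rendered directly in the Streamlit page.
    agent.chat(prompt)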