I am watching a YouTube tutorial on LangChain. In the agents and tools section, I used the ChatGroq class to use Groq models instead of the OpenAI model, and I got a runtime error. The rest of the code remains the same as in the tutorial.

Here is the YouTube video link: https://youtu.be/yF9kGESAi3M

Here is my code:

```python
from dotenv import load_dotenv
from langchain import hub
from langchain.agents import AgentExecutor, create_react_agent
from langchain_core.tools import Tool
from langchain_groq import ChatGroq

# Load environment variables from .env file
load_dotenv()


# Define a very simple tool function that returns the current time
def get_current_time(*args, **kwargs):
    """Returns the current time in H:MM AM/PM format."""
    import datetime  # Import datetime module to get current time

    now = datetime.datetime.now()  # Get current time
    return now.strftime("%I:%M %p")  # Format time in H:MM AM/PM format


# List of tools available to the agent
tools = [
    Tool(
        name="Time",  # Name of the tool
        func=get_current_time,  # Function that the tool will execute
        # Description of the tool
        description="Useful for when you need to know the current time",
    ),
]

# Pull the prompt template from the hub
# ReAct = Reason and Action
# https://smith.langchain.com/hub/hwchase17/react
prompt = hub.pull("hwchase17/react")

# Initialize a ChatGroq model (this is the only change from the tutorial)
llm = ChatGroq(model="llama3-groq-8b-8192-tool-use-preview", temperature=0)

# Create the ReAct agent using the create_react_agent function
agent = create_react_agent(
    llm=llm,
    tools=tools,
    prompt=prompt,
    stop_sequence=True,
)

# Create an agent executor from the agent and tools
agent_executor = AgentExecutor.from_agent_and_tools(
    agent=agent,
    tools=tools,
    verbose=True,
)

# Run the agent with a test query
response = agent_executor.invoke({"input": "What time is it?"})

# Print the response from the agent
print("response:", response)
```
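
For comparison, the only line I changed is the model initialization; the tutorial uses an OpenAI chat model there instead. Roughly, it looks like this (the exact model name used in the video may differ, so treat this as a sketch):

```python
# Tutorial's version of the LLM initialization (model name is an assumption)
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o", temperature=0)
```

Everything else (the Time tool, the hwchase17/react prompt, and the AgentExecutor) is unchanged from the tutorial.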