import streamlit as st
import json
from src.langgraphagenticai.ui.streamlit.display_result import DisplayResultStreamlit
from src.langgraphagenticai.ui.streamlit.load_ui import LoadStreamlitUI
from src.langgraphagenticai.llms.groq_llm import GroqChatLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder
# MAIN function start
def load_langgraph_agenticai_app():
"""
Loads and runs the LangGraph AgenticAI application with Streamlit UI.
This function initializes the UI, handles user input, configures the
LLM model, sets up the graph based on the selected use case, and displays
the output while implementing exception handling for robustness.
"""
# Load UI
ui = LoadStreamlitUI()
user_input = ui.load_streamlit_ui()
if not user_input:
st.error("Error: Failed to load user input from the UI")
return
# Text input for user message
if st.session_state.IsFetchButtonClick:
user_message = st.session_state.timeframe
else:
user_message = st.chat_input("Enter your message")
if user_message:
try:
# Configure LLM
obj_llm_config = GroqChatLLM(user_controls_input=user_input)
model = obj_llm_config.get_llm_model()
if not model:
st.error("Error: LLM model could not be initialized")
return
# Initialize and set up the graph based on use case
usecase = user_input.get('selected_usecase')
if not usecase:
st.error("Error: No use case selected.")
return
## Graph builder
graph_builder = GraphBuilder(model)
try:
graph = graph_builder.setup_graph(usecase)
DisplayResultStreamlit(usecase, graph, user_message).display_result_in_ui()
except Exception as e:
st.error(f"Error: Graph setup failed {e}")
return
except Exception as e:
raise ValueError(f"Error Occurred with Exception: {e}") |
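

# A minimal entry-point sketch, added for illustration: the original file does
# not show how load_langgraph_agenticai_app() is invoked, and the Space likely
# calls it from a separate app.py/main.py, so this __main__ guard is an
# assumption rather than part of the source.
if __name__ == "__main__":
    load_langgraph_agenticai_app()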