Spaces:
Running
Running
# Standard library
from typing import TypedDict, Annotated, List

# Third-party
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, BaseMessage, AIMessage, SystemMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages

# Local
from scripts.load_model import get_model

# Load environment variables (e.g. model API keys) from a .env file
# before the model is constructed below.
load_dotenv()
# Persona prompt injected at the front of every model call (see chat_node);
# it is never written into the checkpointed history.
SYSTEM_MESSAGE = SystemMessage(
    content=(
        "You are Cortex, a coding assistant created by Junaid, helping with technical and coding tasks. "
        "Answer clearly, stay focused on coding and technical solutions, and keep responses concise."
    )
)

# Single shared chat-model instance for the whole module.
llm = get_model()
class ChatState(TypedDict):
    """Graph state: the running conversation history.

    The ``add_messages`` reducer tells LangGraph to append messages
    returned by nodes onto the stored list rather than replacing it.
    """

    messages: Annotated[list[BaseMessage], add_messages]
def chat_node(state: ChatState):
    """Run one LLM turn over the conversation.

    The system prompt is prepended to the stored history for the model
    call only; the returned partial update contains just the new reply,
    which ``add_messages`` appends to the checkpointed history.
    """
    prompt = [SYSTEM_MESSAGE] + state["messages"]
    reply = llm.invoke(prompt)
    return {"messages": [reply]}
# In-memory checkpointer: conversation history is kept per thread_id
# for the lifetime of the process only.
checkpointer = MemorySaver()

# Minimal one-node graph: START -> chat_node -> END.
graph = StateGraph(ChatState)
graph.add_node("chat_node", chat_node)
graph.add_edge(START, "chat_node")
graph.add_edge("chat_node", END)

# Compiled, checkpoint-enabled chatbot application.
cortex_chatbot = graph.compile(checkpointer=checkpointer)
def stream_chat_response(user_message: str, thread_id: str):
    """Stream the assistant's reply for a single user message.

    Args:
        user_message: Raw text typed by the user.
        thread_id: Conversation key; the checkpointer scopes the stored
            history to this id, so reusing it continues the same chat.

    Yields:
        Chunks exactly as produced by the graph's ``stream_mode="messages"``
        streaming (presumably per-token message chunks — confirm against
        the installed LangGraph version's docs).
    """
    run_config = {"configurable": {"thread_id": thread_id}}
    initial_state = {"messages": [HumanMessage(content=user_message)]}
    yield from cortex_chatbot.stream(
        initial_state,
        config=run_config,
        stream_mode="messages",
    )