From 97283af23c1f12f55634213c1660c2c7dda1ca12 Mon Sep 17 00:00:00 2001 From: GabrielVasilescu04 Date: Fri, 20 Feb 2026 14:42:08 +0200 Subject: [PATCH] feat: add attachments support for coded conversational agents --- pyproject.toml | 2 +- samples/chat-hitl-agent/graph.py | 2 +- .../file-attachments-chat-agent/.env.example | 3 + samples/file-attachments-chat-agent/README.md | 42 +++++++++ .../file-attachments-chat-agent/agent.mermaid | 9 ++ .../langgraph.json | 5 ++ samples/file-attachments-chat-agent/main.py | 32 +++++++ .../pyproject.toml | 10 +++ .../file-attachments-chat-agent/uipath.json | 14 +++ src/uipath_langchain/chat/__init__.py | 8 +- src/uipath_langchain/chat/tools/__init__.py | 16 ++++ .../chat/tools/attachments.py | 90 +++++++++++++++++++ src/uipath_langchain/chat/{ => tools}/hitl.py | 0 uv.lock | 2 +- 14 files changed, 231 insertions(+), 4 deletions(-) create mode 100644 samples/file-attachments-chat-agent/.env.example create mode 100644 samples/file-attachments-chat-agent/README.md create mode 100644 samples/file-attachments-chat-agent/agent.mermaid create mode 100644 samples/file-attachments-chat-agent/langgraph.json create mode 100644 samples/file-attachments-chat-agent/main.py create mode 100644 samples/file-attachments-chat-agent/pyproject.toml create mode 100644 samples/file-attachments-chat-agent/uipath.json create mode 100644 src/uipath_langchain/chat/tools/__init__.py create mode 100644 src/uipath_langchain/chat/tools/attachments.py rename src/uipath_langchain/chat/{ => tools}/hitl.py (100%) diff --git a/pyproject.toml b/pyproject.toml index c24c28f80..7d22d6cc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uipath-langchain" -version = "0.7.11" +version = "0.7.12" description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform" readme = { file = "README.md", content-type = "text/markdown" } requires-python = ">=3.11" diff --git 
a/samples/chat-hitl-agent/graph.py b/samples/chat-hitl-agent/graph.py index 828fd7f45..0f93c361a 100644 --- a/samples/chat-hitl-agent/graph.py +++ b/samples/chat-hitl-agent/graph.py @@ -1,7 +1,7 @@ from langchain_anthropic import ChatAnthropic from langchain_tavily import TavilySearch from langchain.agents import create_agent -from uipath_langchain.chat import requires_approval +from uipath_langchain.chat.tools import requires_approval tavily_tool = TavilySearch(max_results=5) diff --git a/samples/file-attachments-chat-agent/.env.example b/samples/file-attachments-chat-agent/.env.example new file mode 100644 index 000000000..ae00c4096 --- /dev/null +++ b/samples/file-attachments-chat-agent/.env.example @@ -0,0 +1,3 @@ +UIPATH_ACCESS_TOKEN=YOUR TOKEN HERE +UIPATH_URL=https://alpha.uipath.com// +OPENAI_API_KEY=your_openai_api_key diff --git a/samples/file-attachments-chat-agent/README.md b/samples/file-attachments-chat-agent/README.md new file mode 100644 index 000000000..332f2449a --- /dev/null +++ b/samples/file-attachments-chat-agent/README.md @@ -0,0 +1,42 @@ +# File Attachments Chat Agent + +An AI assistant that reads and analyzes file attachments shared in the conversation. + +## Requirements + +- Python 3.11+ +- OpenAI API key + +## Installation + +```bash +uv venv -p 3.11 .venv +source .venv/bin/activate # On Windows: .venv\Scripts\activate +uv sync +``` + +Set your API key as an environment variable in .env + +```bash +OPENAI_API_KEY=your_openai_api_key +``` + +## Usage + +**1.** Upload the file to Orchestrator using the [attachments API](https://uipath.github.io/uipath-python/core/attachments/). + +**2.** Run the agent, passing the attachment ID and file metadata returned by the upload: + +```bash +uipath run agent '{ + "messages": [ + { + "type": "human", + "content": [ + { "type": "text", "text": "Summarize this document." 
}, + { "type": "text", "text": "[{\"id\": \"{orchestrator_attachment_id}\", \"full_name\": \"{file_name}\", \"mime_type\": \"{file_mime_type}\"}]" } + ] + } + ] +}' +``` diff --git a/samples/file-attachments-chat-agent/agent.mermaid b/samples/file-attachments-chat-agent/agent.mermaid new file mode 100644 index 000000000..495db26c9 --- /dev/null +++ b/samples/file-attachments-chat-agent/agent.mermaid @@ -0,0 +1,9 @@ +flowchart TB + __start__(__start__) + model(model) + tools(tools) + __end__(__end__) + __start__ --> model + model --> __end__ + model --> tools + tools --> model diff --git a/samples/file-attachments-chat-agent/langgraph.json b/samples/file-attachments-chat-agent/langgraph.json new file mode 100644 index 000000000..408405763 --- /dev/null +++ b/samples/file-attachments-chat-agent/langgraph.json @@ -0,0 +1,5 @@ +{ + "graphs": { + "agent": "./main.py:graph" + } +} diff --git a/samples/file-attachments-chat-agent/main.py b/samples/file-attachments-chat-agent/main.py new file mode 100644 index 000000000..5c88c7005 --- /dev/null +++ b/samples/file-attachments-chat-agent/main.py @@ -0,0 +1,32 @@ +from langchain.agents import create_agent +from langchain_openai import ChatOpenAI + +from uipath_langchain.chat.tools import AnalyzeAttachmentsTool + +system_prompt = """ +You are an AI assistant specialized in analyzing user-provided files using the available file analysis tool. +Always use the provided tool to read and analyze any uploaded or referenced file. Never guess or fabricate file contents. If a file is missing or inaccessible, ask the user to upload it again. + +When a file is received: + 1.Identify the file type. + 2.Provide a clear, concise summary. + 3.Extract key information relevant to the user’s request. + 4.Highlight important patterns, issues, or insights when applicable. + 5.If the user’s request is unclear, ask a focused clarification question before proceeding. + +For follow-up questions: + 1.Base all answers strictly on the file contents. 
+ 2.Maintain context across the conversation. + 3.Perform deeper analysis, comparisons, transformations, or extractions as requested. + 4.Clearly distinguish between observed facts and inferred insights. If something cannot be determined from the file, state that explicitly. + +Keep responses structured, concise, and professional. Treat all file data as sensitive and do not retain or reuse it outside the current conversation. +""" + +llm = ChatOpenAI(model="gpt-4.1") + +graph = create_agent( + llm, + tools=[AnalyzeAttachmentsTool(llm=llm)], + system_prompt=system_prompt, +) diff --git a/samples/file-attachments-chat-agent/pyproject.toml b/samples/file-attachments-chat-agent/pyproject.toml new file mode 100644 index 000000000..b70c779ca --- /dev/null +++ b/samples/file-attachments-chat-agent/pyproject.toml @@ -0,0 +1,10 @@ +[project] +name = "file-attachments-chat-agent" +version = "0.0.1" +description = "file-attachments-chat-agent" +authors = [{ name = "John Doe", email = "john.doe@myemail.com" }] +dependencies = [ + "langchain-openai>=1.1.9", + "uipath-langchain", +] +requires-python = ">=3.11" diff --git a/samples/file-attachments-chat-agent/uipath.json b/samples/file-attachments-chat-agent/uipath.json new file mode 100644 index 000000000..7969b8f00 --- /dev/null +++ b/samples/file-attachments-chat-agent/uipath.json @@ -0,0 +1,14 @@ +{ + "$schema": "https://cloud.uipath.com/draft/2024-12/uipath", + "runtimeOptions": { + "isConversational": true + }, + "packOptions": { + "fileExtensionsIncluded": [], + "filesIncluded": [], + "filesExcluded": [], + "directoriesExcluded": [], + "includeUvLock": true + }, + "functions": {} +} diff --git a/src/uipath_langchain/chat/__init__.py b/src/uipath_langchain/chat/__init__.py index ea2d8943f..c78219e57 100644 --- a/src/uipath_langchain/chat/__init__.py +++ b/src/uipath_langchain/chat/__init__.py @@ -26,9 +26,13 @@ def __getattr__(name): return UiPathChatOpenAI if name == "requires_approval": - from .hitl import 
requires_approval + from .tools.hitl import requires_approval return requires_approval + if name == "AnalyzeAttachmentsTool": + from .tools.attachments import AnalyzeAttachmentsTool + + return AnalyzeAttachmentsTool if name in ("OpenAIModels", "BedrockModels", "GeminiModels"): from . import supported_models @@ -50,4 +54,6 @@ def __getattr__(name): "requires_approval", "LLMProvider", "APIFlavor", + "requires_approval", + "AnalyzeAttachmentsTool", ] diff --git a/src/uipath_langchain/chat/tools/__init__.py b/src/uipath_langchain/chat/tools/__init__.py new file mode 100644 index 000000000..cdbbf91d0 --- /dev/null +++ b/src/uipath_langchain/chat/tools/__init__.py @@ -0,0 +1,16 @@ +def __getattr__(name): + if name == "AnalyzeAttachmentsTool": + from .attachments import AnalyzeAttachmentsTool + + return AnalyzeAttachmentsTool + if name == "requires_approval": + from .hitl import requires_approval + + return requires_approval + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +__all__ = [ + "AnalyzeAttachmentsTool", + "requires_approval", +] diff --git a/src/uipath_langchain/chat/tools/attachments.py b/src/uipath_langchain/chat/tools/attachments.py new file mode 100644 index 000000000..56266af5a --- /dev/null +++ b/src/uipath_langchain/chat/tools/attachments.py @@ -0,0 +1,90 @@ +"""Attachment resolution for conversational agents.""" + +from typing import Any + +from langchain_core.language_models import BaseChatModel +from langchain_core.tools import StructuredTool +from uipath.agent.models.agent import ( + AgentInternalAnalyzeFilesToolProperties, + AgentInternalToolResourceConfig, +) + +from uipath_langchain.agent.tools.internal_tools.analyze_files_tool import ( + create_analyze_file_tool, +) + +_ANALYZE_ATTACHMENTS_NAME = "analyze attachments" +_ANALYZE_ATTACHMENTS_DESCRIPTION = ( + "Read and interpret the content of file attachments provided by the user. 
Call this when you see an attachments tag in a user message, passing
+ + Example:: + + from langchain_openai import ChatOpenAI + from uipath_langchain.chat import AnalyzeAttachmentsTool + + llm = ChatOpenAI(model="gpt-4.1") + tool = AnalyzeAttachmentsTool(llm=llm) + """ + resource = AgentInternalToolResourceConfig( + name=_ANALYZE_ATTACHMENTS_NAME, + description=_ANALYZE_ATTACHMENTS_DESCRIPTION, + input_schema=_ANALYZE_ATTACHMENTS_INPUT_SCHEMA, + output_schema=_ANALYZE_ATTACHMENTS_OUTPUT_SCHEMA, + properties=AgentInternalAnalyzeFilesToolProperties(), + ) + return create_analyze_file_tool(resource, llm) diff --git a/src/uipath_langchain/chat/hitl.py b/src/uipath_langchain/chat/tools/hitl.py similarity index 100% rename from src/uipath_langchain/chat/hitl.py rename to src/uipath_langchain/chat/tools/hitl.py diff --git a/uv.lock b/uv.lock index fe62cedd1..c98ea6554 100644 --- a/uv.lock +++ b/uv.lock @@ -3324,7 +3324,7 @@ wheels = [ [[package]] name = "uipath-langchain" -version = "0.7.11" +version = "0.7.12" source = { editable = "." } dependencies = [ { name = "httpx" },