From c3d90cc7a1fe9bef944b9ffccd784401dff5a050 Mon Sep 17 00:00:00 2001
From: Mark Hulbert <39801222+m-hulbert@users.noreply.github.com>
Date: Mon, 12 Jan 2026 15:24:03 +0100
Subject: [PATCH] Add an overview page for AI Transport

---
 src/data/nav/aitransport.ts           |  11 +-
 src/pages/docs/ai-transport/index.mdx | 141 +++++++++++++++++++++++++-
 2 files changed, 142 insertions(+), 10 deletions(-)

diff --git a/src/data/nav/aitransport.ts b/src/data/nav/aitransport.ts
index 1a4c7a0db0..dbfddf7cb5 100644
--- a/src/data/nav/aitransport.ts
+++ b/src/data/nav/aitransport.ts
@@ -9,14 +9,9 @@ export default {
   },
   content: [
     {
-      name: 'Introduction',
-      pages: [
-        {
-          name: 'About AI Transport',
-          link: '/docs/ai-transport',
-          index: true,
-        },
-      ],
+      name: 'About AI Transport',
+      link: '/docs/ai-transport',
+      index: true,
     },
     {
       name: 'Token streaming',
diff --git a/src/pages/docs/ai-transport/index.mdx b/src/pages/docs/ai-transport/index.mdx
index fb2f2b271e..0c906e9340 100644
--- a/src/pages/docs/ai-transport/index.mdx
+++ b/src/pages/docs/ai-transport/index.mdx
@@ -1,6 +1,143 @@
 ---
 title: About AI Transport
+intro: "Ably AI Transport is a drop-in infrastructure layer that upgrades your AI streams into bi-directional, stateful experiences. It enables you to build multi-device, steerable AI applications that are agent-agnostic, incredibly resilient and highly scalable."
 meta_description: "Learn more about Ably's AI Transport and the features that enable you to quickly build functionality into new and existing applications."
-redirect_from:
-  - /docs/products/ai-transport
 ---
+
+AI Transport enables you to add a realtime delivery layer to your application, providing the infrastructure required to deliver modern, stateful AI experiences to users. It works seamlessly with any AI model or framework, such as OpenAI, Anthropic, Vercel or LangChain.
+
+AI Transport runs on Ably's [fault-tolerant](/docs/platform/architecture/fault-tolerance) and highly available platform. The platform enables data to be streamed between all internet-connected devices at [low latencies](/docs/platform/architecture/latency) across the globe. Its elastic global infrastructure delivers enterprise-scale messaging that [effortlessly scales](/docs/platform/architecture/platform-scalability) to meet demand.
+
+Drop AI Transport into your applications to transform them into modern, bi-directional AI experiences that keep users engaged. AI Transport provides the building blocks to deliver reliable, resumable token streams with robust session management and state hydration to always keep your users and agents in sync.
+
+![Before and after adding AI Transport](../../../images/content/diagrams/ai-transport-before-and-after.png)
+
+## Get started
+
+Start learning the basics of AI Transport right away with a getting started guide for your agent and framework of choice:
+
+### OpenAI
+
+
+{[
+  {
+    title: 'Message-per-response',
+    description: 'Stream OpenAI responses using message appends',
+    image: 'icon-tech-javascript',
+    link: '/docs/guides/ai-transport/openai-message-per-response',
+  },
+  {
+    title: 'Message-per-token',
+    description: 'Stream OpenAI responses using individual token messages',
+    image: 'icon-tech-javascript',
+    link: '/docs/guides/ai-transport/openai-message-per-token',
+  },
+]}
+
+
+### Anthropic
+
+
+{[
+  {
+    title: 'Message-per-response',
+    description: 'Stream Anthropic responses using message appends',
+    image: 'icon-tech-javascript',
+    link: '/docs/guides/ai-transport/anthropic-message-per-response',
+  },
+  {
+    title: 'Message-per-token',
+    description: 'Stream Anthropic responses using individual token messages',
+    image: 'icon-tech-javascript',
+    link: '/docs/guides/ai-transport/anthropic-message-per-token',
+  },
+]}
+
+
+## Features
+
+AI Transport provides a range of features, built on Ably's highly scalable realtime platform, that enable you to deliver reliable, stateful AI experiences with the first-class UX your users expect from modern applications.
+
+### Token streaming
+
+Token streaming is the core of how LLMs deliver their responses. Tokens are progressively streamed from your LLM so that users don't need to wait for a complete response before seeing any output.
+
+Using AI Transport, your token streams are reliable and persistent. They survive modern environments where users change browser tabs, refresh the page or switch devices, and common interruptions such as temporary network loss. Your users can always reconnect and continue where they left off without having to start over.
+
+[Read more about token streaming](/docs/ai-transport/features/token-streaming).
+
+### Bi-directional communication
+
+AI Transport supports rich, bi-directional communication patterns between users and agents.
+
+Build sophisticated AI experiences with features such as:
+
+- accepting user input for interactive conversations
+- streaming chain-of-thought reasoning for transparency
+- attaching citations to responses for verifiability
+- implementing human-in-the-loop workflows for sensitive operations
+- exposing tool calls for generative UI and visibility
+
+These messaging features work seamlessly with token streaming to create complete, interactive AI experiences.
+
+[Read more about messaging features](/docs/ai-transport/features/messaging).
+
+### Durable sessions
+
+AI Transport enables durable sessions that persist beyond the lifetime of individual connections, allowing users and agents to connect and disconnect independently.
+
+Communication shouldn't be tied to the connection state of either party. If a user goes offline or their connection drops, they should be able to continue their session without losing context. AI Transport provides robust session management: the session persists independently of any single connection, so both sides can drop and rejoin at any time.
+
+Your users can start a conversation on their mobile and seamlessly continue it on their desktop. Similarly, multiple users can participate in the same conversation with a single agent, and they will all remain in sync in realtime.
+
+[Read more about sessions and identity](/docs/ai-transport/features/sessions-identity).
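+
+To make the streaming and session model concrete, the following sketch shows how a client might resume a token stream over an Ably channel after a refresh or a device switch. The channel name, event name and rewind window are assumptions for illustration only; the getting started guides above show the recommended patterns for each provider.
+
+```javascript
+import * as Ably from 'ably';
+
+// Illustrative sketch only: 'ai:session-abc', the 'token' event name and the
+// rewind window are assumed values, not part of a published AI Transport API.
+const realtime = new Ably.Realtime({ key: 'YOUR_ABLY_API_KEY', clientId: 'user-123' });
+
+// Attaching with rewind replays recent messages before live delivery resumes,
+// so a client that refreshes, switches tabs or changes devices can pick the
+// stream back up without starting over.
+const channel = realtime.channels.get('ai:session-abc', { params: { rewind: '2m' } });
+
+let response = '';
+await channel.subscribe('token', (message) => {
+  response += message.data; // append each streamed chunk in arrival order
+  console.log(response);
+});
+```
+
+Because the stream lives on the channel rather than on a single connection, the same pattern works whether it's the original device reconnecting or a second device joining the conversation mid-response.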
+
+### Automatic catch-up
+
+AI Transport enables clients to hydrate conversation and session state from the channel, including message history and in-progress responses.
+
+Whether a user is briefly disconnected while driving through a tunnel, or rejoining a conversation the next working day, AI Transport allows clients to resynchronise the full conversation state, including both historical messages and in-progress responses. Your users are always up to date with the full conversation, in order, anywhere.
+
+[Read more about client hydration](/docs/ai-transport/features/token-streaming/message-per-response#hydration).
+
+### Background processing
+
+AI Transport allows agents to process jobs in the background while users go offline, with full awareness of their online status through realtime presence tracking (a minimal sketch of this pattern appears at the end of this page).
+
+Users can work asynchronously by prompting an agent to perform a task without having to monitor its progress. They can go offline and receive a push notification when the agent has completed the task, or reconnect at any time to seamlessly resume and see all the progress made while they were away, thanks to [state hydration](/docs/ai-transport/features/token-streaming/message-per-response#hydration).
+
+AI Transport also puts you in control of how your application behaves when no users are online. For example, you can choose whether to pause a conversation when a user exits their browser tab, or allow the agent to complete its response for the user to view when they return.
+
+[Read more about status-aware cost controls](/docs/ai-transport/features/sessions-identity/online-status).
+
+### Enterprise controls
+
+Ably's platform provides [integrations](/docs/platform/integrations) and functionality to ensure that your applications always exceed the requirements of enterprise environments, whether that's [message auditing](/docs/platform/integrations/streaming), [client identification](/docs/auth/identified-clients) or [fine-grained authorization](/docs/auth/capabilities).
+
+## Examples
+
+Take a look at some in-browser examples of the sorts of features you can build with AI Transport underpinning your applications:
+
+
+{[
+  {
+    title: 'Message per response streaming',
+    description: 'Stream individual tokens from AI models into a single message.',
+    image: 'icon-tech-javascript',
+    link: '/examples/ai-transport-message-per-response?lang=javascript',
+  },
+  {
+    title: 'Message per response streaming',
+    description: 'Stream individual tokens from AI models into a single message.',
+    image: 'icon-tech-react',
+    link: '/examples/ai-transport-message-per-response?lang=react',
+  },
+  {
+    title: 'Message per token streaming',
+    description: 'Stream individual tokens from AI models as separate messages.',
+    image: 'icon-tech-javascript',
+    link: '/examples/ai-transport-message-per-token?lang=javascript',
+  },
+  {
+    title: 'Message per token streaming',
+    description: 'Stream individual tokens from AI models as separate messages.',
+    image: 'icon-tech-react',
+    link: '/examples/ai-transport-message-per-token?lang=react',
+  },
+]}
+
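+Finally, as a rough sketch of the presence pattern described under Background processing above, an agent-side process might check whether any users are currently online before deciding how to proceed. The channel name, client identifiers and decision logic below are assumptions for illustration, not a prescribed AI Transport workflow.
+
+```javascript
+import * as Ably from 'ably';
+
+// Illustrative sketch only: channel and client naming are assumed conventions.
+const realtime = new Ably.Realtime({ key: 'YOUR_ABLY_API_KEY', clientId: 'agent-worker-1' });
+const channel = realtime.channels.get('ai:session-abc');
+
+// Users enter presence when they open the conversation; the agent reacts to
+// presence changes to decide whether to stream tokens live or keep working
+// in the background and notify the user later.
+await channel.presence.subscribe(['enter', 'leave'], async () => {
+  const members = await channel.presence.get();
+  const userOnline = members.some((member) => member.clientId?.startsWith('user-'));
+  console.log(userOnline ? 'A user is online: stream tokens live' : 'No users online: continue in the background');
+});
+```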