@@ -158,7 +158,7 @@ class HMSTranscription {
final String? peerName;
final bool isFinal;
-}
+}
```
### Step 3: To stop getting transcriptions, remove HMSTranscriptListener
@@ -180,7 +180,7 @@ class Meeting implements HMSUpdateListener,HMSTranscriptListener {
## How to start/stop transcriptions
-Transcriptions can only be started or stopped by the peer having admin permissions. Transcription permissions can be changed from [100ms dashboard](https://dashboard.100ms.live/).
+Transcriptions can only be started or stopped by the peer having admin permissions. Transcription permissions can be changed from [100ms dashboard](https://dashboard.100ms.live/).
HMSSDK provides `HMSTranscriptionController` to control transcription.
### Start Transcription
@@ -203,7 +203,7 @@ class Meeting implements HMSUpdateListener,HMSTranscriptListener {
HMSTranscriptionState.started) {
///Captions are enabled in the room
isTranscriptionEnabled = true;
- }
+ }
else if(room.transcriptions?[index].state ==
HMSTranscriptionState.stopped){
///Captions are disabled in the room
@@ -243,8 +243,7 @@ class Meeting implements HMSUpdateListener,HMSTranscriptListener {
```
After calling `startTranscription` method, `onRoomUpdate` will be fired with `HMSRoomUpdate.transcriptionsUpdated` event.
-First update will be with `HMSTranscriptionState.initialized` and then `HMSTranscriptionState.started` state.
-
+First update will be with `HMSTranscriptionState.initialized` and then `HMSTranscriptionState.started` state.
### Stop Transcription
@@ -268,7 +267,7 @@ class Meeting implements HMSUpdateListener,HMSTranscriptListener {
HMSTranscriptionState.started) {
///Captions are enabled in the room
isTranscriptionEnabled = true;
- }
+ }
else if(room.transcriptions?[index].state ==
HMSTranscriptionState.stopped){
///Captions are disabled in the room
@@ -305,5 +304,5 @@ class Meeting implements HMSUpdateListener,HMSTranscriptListener {
}
```
-After calling `stopTranscription` method, `onRoomUpdate` will be fired with `HMSRoomUpdate.transcriptionsUpdated` event.
-Update will be fired with `HMSTranscriptionState.stopped` state.
\ No newline at end of file
+After calling `stopTranscription` method, `onRoomUpdate` will be fired with `HMSRoomUpdate.transcriptionsUpdated` event.
+Update will be fired with `HMSTranscriptionState.stopped` state.
diff --git a/docs/ios/v2/how-to-guides/set-up-video-conferencing/captions.mdx b/docs/ios/v2/how-to-guides/set-up-video-conferencing/captions.mdx
index 6543dbdd6..91d86e6a0 100644
--- a/docs/ios/v2/how-to-guides/set-up-video-conferencing/captions.mdx
+++ b/docs/ios/v2/how-to-guides/set-up-video-conferencing/captions.mdx
@@ -1,14 +1,14 @@
---
-title: Live Transcription for Conferencing (Closed Captions - Beta)
+title: Live Transcription for Conferencing (Closed Captions)
nav: 4.99
---
-100ms real-time transcription engine generates a live transcript (closed captions) during a conferencing session.
+100ms real-time transcription engine generates a live transcript (closed captions) during a conferencing session.
The SDK provides a callback with the transcript for each peer when they speak.
## Minimum Requirements
-- Minimum 100ms SDK version required is 1.12.0
+- Minimum 100ms SDK version required is 1.12.0
## How to check if captions are started in a room?
@@ -36,11 +36,11 @@ Here is an example implementation:
public func on(transcripts: HMSTranscripts) {
transcripts.transcripts.forEach { transcript in
let peerModel = transcript.peer
-
+
if !(lastTranscript?.isFinal ?? false) {
_ = self.transcriptArray.popLast()
}
-
+
if peerModel == lastTranscript?.peer {
self.transcriptArray += [" " + transcript.transcript]
}
@@ -54,14 +54,16 @@ Here is an example implemenation:
self.transcriptArray += ["\n**\(peerModel.name.trimmingCharacters(in: .whitespacesAndNewlines)):** "]
self.transcriptArray += ["\(transcript.transcript)"]
}
-
+
lastTranscript = transcript
}
}
```
## How to toggle Live Transcriptions on/off
+
You can toggle live transcriptions on/off at runtime that can help save costs. Use startTranscription() method to start the transcription and stopTranscription() method to stop transcription like below:
+
```swift
// Start Real Time Transcription
sdk.startTranscription() { success, error in
@@ -81,4 +83,3 @@ You can toggle live transcriptions on/off at runtime that can help save costs. U
}
}
```
-
diff --git a/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx b/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
index 14f5c1ec3..8b9b3389d 100644
--- a/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
+++ b/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
@@ -1,5 +1,5 @@
---
-title: Live Transcription for Conferencing (Closed Captions - Beta)
+title: Live Transcription for Conferencing (Closed Captions)
nav: 1.24
---
@@ -39,11 +39,9 @@ The `useTranscript` hook is implemented with the `onTranscript` callback as show
## How can you check if closed captions are enabled in a room?
```ts
-
- import { selectIsTranscriptionEnabled, useHMSStore } from '@100mslive/react-sdk';
- // use this to check if caption is enabled for your room.
- const isCaptionPresent: boolean = useHMSStore(selectIsTranscriptionEnabled);
-
+import { selectIsTranscriptionEnabled, useHMSStore } from '@100mslive/react-sdk';
+// use this to check if caption is enabled for your room.
+const isCaptionPresent: boolean = useHMSStore(selectIsTranscriptionEnabled);
```
## How to toggle closed captions on or off?
@@ -51,24 +49,23 @@ The `useTranscript` hook is implemented with the `onTranscript` callback as show
Closed captions can be dynamically enabled or disabled at runtime within a given room, depending on user requirements. This capability helps minimize unnecessary usage costs by ensuring that captions are enabled only when explicitly needed by the user(s).
```ts
-
- // Currently 100ms supports closed captions type mode
- export declare enum HMSTranscriptionMode {
- CAPTION = "caption"
- }
-
- export interface TranscriptionConfig {
- mode: HMSTranscriptionMode;
- }
-
- // admin/host role need to startTranscription if he had the access, here is how you will check if you had access to start transcription
- const isTranscriptionAllowed = useHMSStore(selectIsTranscriptionAllowedByMode(HMSTranscriptionMode.CAPTION));
-
+// Currently 100ms supports closed captions type mode
+export declare enum HMSTranscriptionMode {
+ CAPTION = 'caption'
+}
+
+export interface TranscriptionConfig {
+ mode: HMSTranscriptionMode;
+}
+
+// Only an admin/host role with the required permission can call startTranscription; here is how to check whether you have access to start transcription
+const isTranscriptionAllowed = useHMSStore(
+ selectIsTranscriptionAllowedByMode(HMSTranscriptionMode.CAPTION)
+);
```
Use `hmsActions.startTranscription()` method to start the closed captions.
-
```ts
async startCaption() {
try {
@@ -94,4 +91,4 @@ Use `hmsActions.stopTranscription()` method to stop closed captions.
console.log(err);
}
}
-```
\ No newline at end of file
+```
diff --git a/docs/react-native/v2/how-to-guides/extend-capabilities/live-captions.mdx b/docs/react-native/v2/how-to-guides/extend-capabilities/live-captions.mdx
index 6ffd884f7..d94eb5551 100644
--- a/docs/react-native/v2/how-to-guides/extend-capabilities/live-captions.mdx
+++ b/docs/react-native/v2/how-to-guides/extend-capabilities/live-captions.mdx
@@ -1,55 +1,49 @@
---
-title: Live Transcription for Conferencing (Closed Captions - Beta)
+title: Live Transcription for Conferencing (Closed Captions)
nav: 13.4
---
100ms' real-time transcription engine generates a live transcript (closed captions) during a conferencing session. The SDK provides a callback with transcript for each peer when they speak.
-
## Minimum Requirements
- Minimum `react-native-hms` version required is `1.10.7`
- Minimum `react-native-room-kit` version required is `1.2.0`
-
## Checking if captions are enabled in a room
To check if WebRTC (not hls) captions are enabled in a room. Look for any transcriptions being in a started state in the room data.
```js
-const captionsEnabled = (
- hmsInstance.getRoom()
- ?.transcriptions
- ?.some((transcription) => {
- return transcription.state === TranscriptionState.STARTED;
- })
- ) || false; // Using `false` as default
+const captionsEnabled =
+ hmsInstance.getRoom()?.transcriptions?.some((transcription) => {
+ return transcription.state === TranscriptionState.STARTED;
+ }) || false; // Using `false` as default
```
-
## How to implement captions?
Implement `fun onTranscripts(transcripts: HmsTranscripts)` in the `HMSUpdateListener` callback.
-
## Toggling Live Transcripts
+
To save on cost, live transcriptions can be disabled for everyone at runtime and toggled on again when required.
```js
// Start Real Time Transcription
try {
- await hmsInstance.startRealTimeTranscription()
+ await hmsInstance.startRealTimeTranscription();
} catch (error) {
- // Handle error occurred while starting Transcription
+ // Handle error occurred while starting Transcription
}
```
```js
// Stop Real Time Transcription
try {
- await hmsInstance.stopRealTimeTranscription()
+ await hmsInstance.stopRealTimeTranscription();
} catch (error) {
- // Handle error occurred while starting Transcription
+ // Handle error that occurred while stopping Transcription
}
```
@@ -71,24 +65,23 @@ When Live Transcripts are toggled for room, you get `TRANSCRIPTIONS_UPDATED` upd
```js
hmsInstance.addEventListener(
- HMSUpdateListenerActions.ON_ROOM_UPDATE,
- (data: { room: HMSRoom; type: HMSRoomUpdate; }) => {
-
- if (data.type === HMSRoomUpdate.TRANSCRIPTIONS_UPDATED) {
- // Handle Transcriptions Update like you may update UI if transcriptions were started or stopped
-
- const captionTranscription = data.room.transcriptions?.find(
- (transcription) => transcription.mode === TranscriptionsMode.CAPTION
- );
-
- if (captionTranscription?.state === TranscriptionState.STARTED) {
- // Transcriptions Started in Room
- } else if (captionTranscription?.state === TranscriptionState.STOPPED) {
- // Transcriptions Stopped in Room
- } else if (captionTranscription?.state === TranscriptionState.FAILED) {
- // Transcriptions failed to Start or Stop
- }
+ HMSUpdateListenerActions.ON_ROOM_UPDATE,
+ (data: { room: HMSRoom, type: HMSRoomUpdate }) => {
+ if (data.type === HMSRoomUpdate.TRANSCRIPTIONS_UPDATED) {
+ // Handle Transcriptions Update like you may update UI if transcriptions were started or stopped
+
+ const captionTranscription = data.room.transcriptions?.find(
+ (transcription) => transcription.mode === TranscriptionsMode.CAPTION
+ );
+
+ if (captionTranscription?.state === TranscriptionState.STARTED) {
+ // Transcriptions Started in Room
+ } else if (captionTranscription?.state === TranscriptionState.STOPPED) {
+ // Transcriptions Stopped in Room
+ } else if (captionTranscription?.state === TranscriptionState.FAILED) {
+ // Transcriptions failed to Start or Stop
+ }
+ }
}
- }
);
```
From c3adf0f65d349b2184530c5c7298a760c9742942 Mon Sep 17 00:00:00 2001
From: Yogesh Singh
Date: Thu, 30 Oct 2025 00:48:17 +0530
Subject: [PATCH 11/28] updated version numbers
---
docs/flutter/v2/release-notes/release-notes.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/flutter/v2/release-notes/release-notes.mdx b/docs/flutter/v2/release-notes/release-notes.mdx
index 693c55060..657819adc 100644
--- a/docs/flutter/v2/release-notes/release-notes.mdx
+++ b/docs/flutter/v2/release-notes/release-notes.mdx
@@ -69,7 +69,7 @@ This change ensures your application will:
- Updated all sample applications with consistent build configurations
- Updated dependencies across all packages and examples to ensure compatibility
-Uses Android SDK 2.9.68 & iOS SDK 1.17.0
+Uses Android SDK 2.9.78 & iOS SDK 1.17.0
**Full Changelog**: [1.10.6...1.11.0](https://github.com/100mslive/100ms-flutter/compare/1.10.6...1.11.0)
From 5d10c92c451033ae04197fd16a0fe7ade9a55d61 Mon Sep 17 00:00:00 2001
From: Dmitry Fedoseyev
Date: Mon, 17 Nov 2025 14:07:09 +0200
Subject: [PATCH 12/28] 1.17.1 iOS Release (#2496)
---
docs/ios/v2/release-notes/release-notes.mdx | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/docs/ios/v2/release-notes/release-notes.mdx b/docs/ios/v2/release-notes/release-notes.mdx
index c002dbbff..f9eefe407 100644
--- a/docs/ios/v2/release-notes/release-notes.mdx
+++ b/docs/ios/v2/release-notes/release-notes.mdx
@@ -4,6 +4,12 @@ nav: 6.1
description: Release Notes for 100ms iOS SDK
---
+## 1.17.1 - 2025-11-17
+### Fixed
+
+- Enum conflicts with other WebRTC integrations
+
+
## 1.17.0 - 2025-01-27
### Changed
From 876145f351bb8b844095099b18c70c214df4fcb3 Mon Sep 17 00:00:00 2001
From: Saptarshi Das
Date: Tue, 25 Nov 2025 15:05:07 +0530
Subject: [PATCH 13/28] Added release notes for version 2.9.79, including a bug
fix for NPE during room leave.
---
docs/android/v2/release-notes/release-notes.mdx | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/android/v2/release-notes/release-notes.mdx b/docs/android/v2/release-notes/release-notes.mdx
index bc5a852a6..046f2213c 100644
--- a/docs/android/v2/release-notes/release-notes.mdx
+++ b/docs/android/v2/release-notes/release-notes.mdx
@@ -19,6 +19,10 @@ import AndroidPrebuiltVersionShield from '@/common/android-prebuilt-version-shie
| live.100ms:virtual-background: ||
| live.100ms:hms-noise-cancellation-android: | |
+## v2.9.79 - 2025-11-24
+### Fixed
+* Bug fix to handle NPE at room leave.
+
## v2.9.78 - 2025-10-15
### Fixed
* Fix for memory leak issue
From 99230e9a9e5de29956df4642dc8d72359d67fec6 Mon Sep 17 00:00:00 2001
From: Ravi theja
Date: Fri, 12 Dec 2025 14:25:42 +0530
Subject: [PATCH 14/28] Added release notes for Web SDK version 0.13.0 (#2498)
---
.../v2/release-notes/release-notes.mdx | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/docs/javascript/v2/release-notes/release-notes.mdx b/docs/javascript/v2/release-notes/release-notes.mdx
index cd52710ea..c4b7c2365 100644
--- a/docs/javascript/v2/release-notes/release-notes.mdx
+++ b/docs/javascript/v2/release-notes/release-notes.mdx
@@ -14,6 +14,23 @@ description: Release Notes for 100ms.live JavaScript SDK
| @100mslive/hms-noise-cancellation | [](https://badge.fury.io/js/%40100mslive%2Fhms-noise-cancellation) |
| @100mslive/hms-virtual-background | [](https://badge.fury.io/js/%40100mslive%2Fhms-virtual-background) |
+## 2025-12-04
+
+Released: `@100mslive/hms-video-store@0.13.0`, `@100mslive/react-sdk@0.11.0`, `@100mslive/hls-player@0.4.0`, `@100mslive/roomkit-react@0.4.0`
+
+### Added:
+
+- Upgraded UA Parser JS library for improved user agent detection
+- CPU state capture in publish analytics
+- Upgraded Effects (Virtual Background) plugin with improved CPU performance
+
+### Fixed:
+
+- Audio output device selection issues on macOS Safari 26
+- Stored devices not being removed when they become unavailable, causing constraints failure error on join
+- Error handling when no device is available during device changes
+- Audio not being published when Noise Cancellation is enabled on low network conditions
+
## 2025-08-26
Released: `@100mslive/hms-video-store@0.12.38`, `@100mslive/react-sdk@0.10.38`, `@100mslive/hls-player@0.3.38`, `@100mslive/roomkit-react@0.3.38`
From 58f539fe572e261f6afef7cf00659284eff14beb Mon Sep 17 00:00:00 2001
From: Ravi theja
Date: Fri, 12 Dec 2025 15:11:40 +0530
Subject: [PATCH 15/28] docs: update faq for speaker label logging
---
.../how-to-guides/enable-transcription-and-summary.mdx | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/docs/server-side/v2/how-to-guides/enable-transcription-and-summary.mdx b/docs/server-side/v2/how-to-guides/enable-transcription-and-summary.mdx
index eb8807afe..7f2ac7157 100644
--- a/docs/server-side/v2/how-to-guides/enable-transcription-and-summary.mdx
+++ b/docs/server-side/v2/how-to-guides/enable-transcription-and-summary.mdx
@@ -322,22 +322,18 @@ You can always use 100ms’ Recording Assets API to access the transcripts and s
```jsx
import { useEffect } from "react";
import { useHMSActions } from "@100mslive/react-sdk";
- import { useIsHeadless } from "../AppData/useUISettings";
export function BeamSpeakerLabelsLogging() {
const hmsActions = useHMSActions();
- const isHeadless = useIsHeadless();
useEffect(() => {
- if (isHeadless) {
+ // To be called if you are passing a custom URL for beam recording; preferably call this before join.
hmsActions.enableBeamSpeakerLabelsLogging();
- }
- }, [hmsActions, isHeadless]);
+ }, [hmsActions]);
return null;
}
```
- - Register `` in Approutes along with import statement
- `import { BeamSpeakerLabelsLogging } from "./components/AudioLevel/BeamSpeakerLabelsLogging";`
+
From 6fa025ff0c31b4b612b561a6bb963298a49394ae Mon Sep 17 00:00:00 2001
From: Saptarshi Das
Date: Fri, 12 Dec 2025 16:22:02 +0530
Subject: [PATCH 16/28] docs: add customization options for foreground
notification in prebuilt Android SDK
---
.../v2/quickstart/prebuilt-android.mdx | 53 +++++++++++++++++++
1 file changed, 53 insertions(+)
diff --git a/docs/android/v2/quickstart/prebuilt-android.mdx b/docs/android/v2/quickstart/prebuilt-android.mdx
index 46ec583d6..e5827291e 100644
--- a/docs/android/v2/quickstart/prebuilt-android.mdx
+++ b/docs/android/v2/quickstart/prebuilt-android.mdx
@@ -76,5 +76,58 @@ class HMSPrebuiltOptions {
}
```
+
+
+## Customizing Foreground Notification
+
+
+When your app goes to the background during an active call, a foreground service notification is displayed to keep the call alive. You can customize this notification to match your app's branding using `CallNotificationConfig`.
+
+### Available Options
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `smallIcon` | `@DrawableRes Int?` | Default icon | Small icon shown in status bar and notification header. Should be monochrome for best results. |
+| `largeIcon` | `@DrawableRes Int?` | Default icon | Large icon shown on the right side of the notification. Can be full-color. |
+| `title` | `String?` | "Call in progress" | Notification title text. |
+| `text` | `String?` | "Tap to return to the call" | Notification body text. |
+| `channelName` | `String?` | "Ongoing Call" | Name for the notification channel (visible in system settings). |
+| `channelDescription` | `String?` | Default description | Description for the notification channel. |
+
+### Example Usage
+
+```kotlin
+import live.hms.roomkit.ui.HMSRoomKit
+import live.hms.roomkit.ui.HMSPrebuiltOptions
+import live.hms.roomkit.ui.notification.CallNotificationConfig
+
+class MainActivity : AppCompatActivity() {
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+
+ val roomCode = ""
+
+ val notificationConfig = CallNotificationConfig(
+ smallIcon = R.drawable.my_app_logo, // Your app's notification icon
+ largeIcon = R.drawable.my_app_icon, // Optional larger icon
+ title = "MyApp - Call Active",
+ text = "Tap to return to your call",
+ channelName = "MyApp Calls",
+ channelDescription = "Notifications for ongoing calls"
+ )
+
+ val options = HMSPrebuiltOptions(
+ userName = "",
+ callNotificationConfig = notificationConfig
+ )
+
+ HMSRoomKit.launchPrebuilt(roomCode, this, options)
+ }
+}
+```
+
+All parameters are optional. If not provided, default values will be used.
+
+
## Sample Code
The sample project for the library is at https://github.com/100mslive/AndroidPrebuiltDemo#readme you can download the app for it [here](https://github.com/100mslive/AndroidPrebuiltDemo/suites/14604646490/artifacts/827757135).
From 536312b0fbf5e0fd04fbc8678422d24f840277ae Mon Sep 17 00:00:00 2001
From: bhavesh100ms <89893870+bhavesh100ms@users.noreply.github.com>
Date: Tue, 30 Dec 2025 21:46:15 +0530
Subject: [PATCH 17/28] feat: add prod-us7 IP address (#2500)
feat: add IP address
Co-authored-by: Bhavesh
---
docs/server-side/v2/how-to-guides/firewall-and-ports.mdx | 3 +++
docs/server-side/v2/release-notes/release-notes.mdx | 3 +++
2 files changed, 6 insertions(+)
diff --git a/docs/server-side/v2/how-to-guides/firewall-and-ports.mdx b/docs/server-side/v2/how-to-guides/firewall-and-ports.mdx
index 08ca47de2..aac022ed5 100644
--- a/docs/server-side/v2/how-to-guides/firewall-and-ports.mdx
+++ b/docs/server-side/v2/how-to-guides/firewall-and-ports.mdx
@@ -43,6 +43,9 @@ For smooth call experience add following domains and ports to your firewall whit
35.207.209.133/32
35.244.46.211/32
34.74.251.112/32
+34.169.128.194/32
+34.105.5.246/32
+34.169.40.88/32
```
### NAT gateway IP address whitelisting for webhooks
diff --git a/docs/server-side/v2/release-notes/release-notes.mdx b/docs/server-side/v2/release-notes/release-notes.mdx
index cc63ae3c7..fa100396e 100644
--- a/docs/server-side/v2/release-notes/release-notes.mdx
+++ b/docs/server-side/v2/release-notes/release-notes.mdx
@@ -4,6 +4,9 @@ nav: 5.1
---
This Changelog highlights notable changes to the 100ms server-side API, such as API additions, improvements, and deprecations. Also, we've included developer experience improvements to this page to keep you on track with items that will enhance your integration journey.
+## 2025-12-30
+### Additions
+- Updated reserved IP addresses for TURN servers in EU|US|IN. These IP addresses can be whitelisted in firewall configuration to solve scenarios where a user is unable to join the room due to a restricted network firewall.
## 2024-02-28
From fdde9a3446f1766f4cb2c8797795837097931194 Mon Sep 17 00:00:00 2001
From: cyril-k-031225
Date: Fri, 2 Jan 2026 17:18:29 +0530
Subject: [PATCH 18/28] fixing api endpoint for update settings (#2503)
---
docs/server-side/v2/api-reference/policy/update-settings.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/server-side/v2/api-reference/policy/update-settings.mdx b/docs/server-side/v2/api-reference/policy/update-settings.mdx
index 43a63372d..93d10203f 100644
--- a/docs/server-side/v2/api-reference/policy/update-settings.mdx
+++ b/docs/server-side/v2/api-reference/policy/update-settings.mdx
@@ -12,7 +12,7 @@ This endpoint is used to update [settings](/server-side/v2/api-reference/policy/
```bash
-curl --location --request POST 'https://api.100ms.live/v2//settings' \
+curl --location --request POST 'https://api.100ms.live/v2/templates//settings' \
--header 'Authorization: Bearer ' \
--header 'Content-Type: application/json' \
--data-raw '{
From 13c40d7ba1b1f937a64eecb170cc66e796bfdd23 Mon Sep 17 00:00:00 2001
From: cyril-k-031225
Date: Mon, 5 Jan 2026 11:31:34 +0530
Subject: [PATCH 19/28] Update docs for azure support (#2501)
---
.../storage-configuration.mdx | 11 +++++-----
.../v2/api-reference/Rooms/create-via-api.mdx | 22 +++++++++----------
.../v2/api-reference/Rooms/update-a-room.mdx | 2 +-
.../legacy-api-v1/destinations/recording.mdx | 14 ++++++------
.../policy/create-template-via-api.mdx | 6 ++---
.../v2/how-to-guides/recordings/overview.mdx | 2 +-
6 files changed, 29 insertions(+), 28 deletions(-)
diff --git a/docs/get-started/v2/get-started/features/recordings/recording-assets/storage-configuration.mdx b/docs/get-started/v2/get-started/features/recordings/recording-assets/storage-configuration.mdx
index d69e3183d..a1f547dad 100644
--- a/docs/get-started/v2/get-started/features/recordings/recording-assets/storage-configuration.mdx
+++ b/docs/get-started/v2/get-started/features/recordings/recording-assets/storage-configuration.mdx
@@ -8,6 +8,7 @@ nav: 3.6
- Amazon Simple Storage Service (AWS S3)
- Google Cloud Storage (GCP)
- Alibaba Object Storage (OSS)
+- Azure Blob Storage
By default, recordings will be stored temporarily (for 15 days) in a storage location managed by 100ms if nothing is configured. Post a successful recording, the recording assets can be accessed on the [100ms dashboard](https://dashboard.100ms.live/sessions) or [through the REST API](/server-side/v2/api-reference/recording-assets/get-asset).
@@ -59,12 +60,12 @@ You can configure storage in your template's `Recording` tab on the 100ms Dashbo
Use the [Policy API](https://www.100ms.live/docs/server-side/v2/api-reference/policy/create-template-via-api) to programmatically configure your storage location.
-You can configure the **`type`** field of recording object to `s3` for AWS, `oss` for Alibaba Object Storage Service and `gs` for Google Cloud Storage with the following details:
+You can configure the **`type`** field of recording object to `s3` for AWS, `oss` for Alibaba Object Storage Service, `gs` for Google Cloud Storage, and `azure` for Azure Blob Storage with the following details:
-- Access Key: Access Key for your OSS/GCP Bucket
-- Secret Key: Secret Key for your OSS/GCP Bucket
-- Bucket: Name of the bucket
-- Region: Name of the region where your bucket is located in
+- Access Key: Access Key for your storage bucket (for Azure, use the Storage Account Name)
+- Secret Key: Secret Key for your storage bucket (for Azure, use the Storage Account Key)
+- Bucket: Name of the bucket (for Azure, use the Container Name)
+- Region: Name of the region where your bucket is located in (for Azure, this field is not required)
- Prefix for Upload Path: Define the directory name (optional)
diff --git a/docs/server-side/v2/api-reference/Rooms/create-via-api.mdx b/docs/server-side/v2/api-reference/Rooms/create-via-api.mdx
index b06cf8f5a..a02c16376 100644
--- a/docs/server-side/v2/api-reference/Rooms/create-via-api.mdx
+++ b/docs/server-side/v2/api-reference/Rooms/create-via-api.mdx
@@ -72,10 +72,10 @@ curl --location --request POST 'https://api.100ms.live/v2/rooms' \
> **Note**: This object enables recording and configuring storage during room creation. But we recommend configuring it at a template level through the [Dashboard](https://dashboard.100ms.live/dashboard), where the config validator can help with validating inputs proactively.
-| Name | Type | Description | Required |
-| :---------- | :-------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------- |
-| enabled | `boolean` | Enable SFU recording. Disabled by default.
**Note:** This argument is only applicable to enable/disable [SFU recording](/server-side/v2/Destinations/recording). Refer to [RTMP Streaming & Browser Recording](/server-side/v2/api-reference/external-streams/overview) guide for other options. | No |
-| upload_info | `object` | Object of type `upload_info`. This object contains information on recordings storage location.
If you want to store recording with 100ms, and not use your own storage (s3/gs/oss), don't add this to the object.
Check the [upload_info object](#upload-info-arguments) below for more information. | No |
+| Name | Type | Description | Required |
+| :---------- | :-------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------- |
+| enabled | `boolean` | Enable SFU recording. Disabled by default.
**Note:** This argument is only applicable to enable/disable [SFU recording](/server-side/v2/Destinations/recording). Refer to [RTMP Streaming & Browser Recording](/server-side/v2/api-reference/external-streams/overview) guide for other options. | No |
+| upload_info | `object` | Object of type `upload_info`. This object contains information on recordings storage location.
If you want to store recording with 100ms, and not use your own storage (s3/gs/oss/azure), don't add this to the object.
Check the [upload_info object](#upload-info-arguments) below for more information. | No |
| polls | `array` | Array of poll ids that this room will have. | No |
@@ -88,13 +88,13 @@ To know more about recording please visit [Recording](/server-side/v2/Destinatio
### upload_info arguments
-| Name | Type | Description | Required |
-| :---------- | :------- | :-------------------------------------------------------------------------------------------------------------------------------------- | :------- |
-| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud) are supported. | Yes |
-| location | `string` | Name of the storage bucket in which you want to store all recordings | Yes |
-| prefix | `string` | Upload prefix path | No |
-| options | `object` | Additional configurations of type `Options` to be used for uploading.
Check the options arguments below for more information. | No |
-| credentials | `object` | Object of type `credentials`. This is used to share the credentials to access the storage bucket specified. | No |
+| Name | Type | Description | Required |
+| :---------- | :------- | :---------------------------------------------------------------------------------------------------------------------------------------------- | :------- |
+| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud), `azure` (Azure Blob Storage) are supported. | Yes |
+| location | `string` | Name of the storage bucket in which you want to store all recordings | Yes |
+| prefix | `string` | Upload prefix path | No |
+| options | `object` | Additional configurations of type `Options` to be used for uploading.
Check the options arguments below for more information. | No |
+| credentials | `object` | Object of type `credentials`. This is used to share the credentials to access the storage bucket specified. | No |
#### Options arguments
diff --git a/docs/server-side/v2/api-reference/Rooms/update-a-room.mdx b/docs/server-side/v2/api-reference/Rooms/update-a-room.mdx
index bd3a324d2..5f3368f1d 100644
--- a/docs/server-side/v2/api-reference/Rooms/update-a-room.mdx
+++ b/docs/server-side/v2/api-reference/Rooms/update-a-room.mdx
@@ -20,7 +20,7 @@ curl --location --request POST 'https://api.100ms.live/v2/rooms/' \
"recording_info": {
"enabled": true,
"upload_info": {
- "type": "",
+ "type": "",
"location": "",
"prefix": "",
"options": {
diff --git a/docs/server-side/v2/api-reference/legacy-api-v1/destinations/recording.mdx b/docs/server-side/v2/api-reference/legacy-api-v1/destinations/recording.mdx
index f96e6446d..ea8104b0e 100644
--- a/docs/server-side/v2/api-reference/legacy-api-v1/destinations/recording.mdx
+++ b/docs/server-side/v2/api-reference/legacy-api-v1/destinations/recording.mdx
@@ -62,13 +62,13 @@ Another way to enable SFU recording for a room is to choose `Enabled` for record
By default recordings will be uploaded to 100ms storage and a pre-signed URL for the same will be provided to customers via a webhook. The pre-signed URL will expire in 12 hours.
Customers can also configure the recordings to be stored in their cloud storage. Following are the configurations for the same.
-| Name | Type | Description | Required |
-| :---------- | :------- | :---------------------------------------------------------------------------------------------------------------- | :------- |
-| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud) are supported. | Yes |
-| location | `string` | Name of the storage bucket in which you want to store all recordings | Yes |
-| prefix | `string` | Upload prefix path | No |
-| options | `object` | Additional configurations of type `Options` to be used for uploading | No |
-| credentials | `object` | Object of type `Credentials`. This is used to share credentials to access the storage bucket specified | No |
+| Name | Type | Description | Required |
+| :---------- | :------- | :---------------------------------------------------------------------------------------------------------------------------------------------- | :------- |
+| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud), `azure` (Azure Blob Storage) are supported. | Yes |
+| location | `string` | Name of the storage bucket in which you want to store all recordings | Yes |
+| prefix | `string` | Upload prefix path | No |
+| options | `object` | Additional configurations of type `Options` to be used for uploading | No |
+| credentials | `object` | Object of type `Credentials`. This is used to share credentials to access the storage bucket specified | No |
Where `Options` is
diff --git a/docs/server-side/v2/api-reference/policy/create-template-via-api.mdx b/docs/server-side/v2/api-reference/policy/create-template-via-api.mdx
index e01a77817..1e97e7001 100644
--- a/docs/server-side/v2/api-reference/policy/create-template-via-api.mdx
+++ b/docs/server-side/v2/api-reference/policy/create-template-via-api.mdx
@@ -160,7 +160,7 @@ curl --location --request POST 'https://api.100ms.live/v2/templates' \
"region": "in",
"recording": {
"upload": {
- "type": "",
+ "type": "",
"location": "",
"prefix": "",
"options": {
@@ -441,7 +441,7 @@ curl --location --request POST 'https://api.100ms.live/v2/templates' \
"recording": {
"upload": {
"location": "",
- "type": "",
+ "type": "",
"prefix": "",
"credentials": {
"key": "",
@@ -695,7 +695,7 @@ curl --location --request POST 'https://api.100ms.live/v2/templates' \
| Name | Type | Description | Required |
| :---------- | :------- | :---------------------------------------------------------------------------------------------------------------- | :------- |
-| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud) are supported. | Yes |
+| type | `string` | Upload destination type. Currently, `s3` (AWS), `gs` (Google Cloud Storage), `oss` (Alibaba Cloud), `azure` (Azure Blob Storage) are supported. | Yes |
| location | `string` | Name of the storage bucket in which you want to store all recordings | Yes |
| prefix | `string` | Upload prefix path | Yes |
| options | `object` | Additional configurations of type Options to be used for uploading | No |
diff --git a/docs/server-side/v2/how-to-guides/recordings/overview.mdx b/docs/server-side/v2/how-to-guides/recordings/overview.mdx
index 797ab9a1a..68728f232 100644
--- a/docs/server-side/v2/how-to-guides/recordings/overview.mdx
+++ b/docs/server-side/v2/how-to-guides/recordings/overview.mdx
@@ -116,7 +116,7 @@ If you are relying on the 100ms storage bucket, we recommend downloading the ass
#### Configure your own storage
-100ms supports AWS S3, Google Cloud Storage and Alibaba OSS as storage buckets. These can be configured on [the 100ms dashboard](../../../../concepts/v2/concepts/recordings#configure-storage).
+100ms supports AWS S3, Google Cloud Storage, Alibaba OSS, and Azure Blob Storage as storage buckets. These can be configured on [the 100ms dashboard](../../../../concepts/v2/concepts/recordings#configure-storage).
#### Download assets from dashboard
From 3d4e005c8f07ae5a7e9abb42941a77ef8c8c4c41 Mon Sep 17 00:00:00 2001
From: cyril-k-031225
Date: Mon, 5 Jan 2026 11:58:14 +0530
Subject: [PATCH 20/28] Added release note for Azure support (#2502)
---
docs/server-side/v2/release-notes/release-notes.mdx | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/docs/server-side/v2/release-notes/release-notes.mdx b/docs/server-side/v2/release-notes/release-notes.mdx
index fa100396e..f6c059bc0 100644
--- a/docs/server-side/v2/release-notes/release-notes.mdx
+++ b/docs/server-side/v2/release-notes/release-notes.mdx
@@ -4,6 +4,11 @@ nav: 5.1
---
This Changelog highlights notable changes to the 100ms server-side API, such as API additions, improvements, and deprecations. Also, we've included developer experience improvements to this page to keep you on track with items that will enhance your integration journey.
+
+## 2026-01-02
+### Additions
+- Added support to [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) - `azure`. Check recording config for [Create a room API](../Rooms/create-via-api#recording_info-arguments) and [Create a template API](../policy/create-template-via-api#recording-object) for more information.
+
## 2025-12-30
### Additions
- updated reserved IP addresses for TURN servers in EU|US|IN. These IP addresses can be whitelisted in firewall configuration to solve scenarios where a user is unable to join the room due to a restricted network firewall.
From 2b019fc3df69146aadba4b4373c49dabf21f516d Mon Sep 17 00:00:00 2001
From: Aarthi Thirumalai
Date: Wed, 14 Jan 2026 11:23:06 +0530
Subject: [PATCH 21/28] add docs for webhook events PI (#2504)
* add docs for webhook events PI
* Edits
---------
Co-authored-by: Aarthi Thirumalai
---
CLAUDE.md | 224 ++++++++++++++++++
.../analytics/list-webhook-events.mdx | 112 +++++++++
.../v2/api-reference/analytics/overview.mdx | 7 +
.../analytics/replay-webhook-events.mdx | 220 +++++++++++++++++
releases.js | 2 +-
5 files changed, 564 insertions(+), 1 deletion(-)
create mode 100644 CLAUDE.md
create mode 100644 docs/server-side/v2/api-reference/analytics/list-webhook-events.mdx
create mode 100644 docs/server-side/v2/api-reference/analytics/replay-webhook-events.mdx
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 000000000..dd98ef416
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,224 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+This is the documentation site for 100ms.live, built as a Next.js application that renders MDX files into interactive documentation. The site runs on port 4000 by default and uses a file-based routing system where MDX files in the `/docs` directory automatically generate pages.
+
+**Base Path:** All routes are served under `/docs` prefix (configured in [next.config.js:10](next.config.js#L10))
+
+## Development Commands
+
+**Local Development:**
+```bash
+yarn dev # Start dev server on port 4000
+yarn id # Shortcut for yarn && yarn dev
+```
+
+**Building:**
+```bash
+yarn build # Updates releases, builds Next.js, and generates search index
+yarn updatereleases # Must run before viewing local release version changes
+```
+
+**Code Quality:**
+```bash
+yarn lint # Lint components and lib files
+yarn format # Format code with Prettier
+```
+
+**Documentation Linting (Vale):**
+```bash
+brew install vale
+vale sync
+vale docs/*
+```
+
+Add whitelisted tokens to [.github/workflows/styles/Vocab/HMSVocab/accept.txt](.github/workflows/styles/Vocab/HMSVocab/accept.txt)
+
+**Build Artifacts:**
+- Post-build generates sitemap via `next-sitemap` (configured in [next-sitemap.js](next-sitemap.js))
+- Search index is generated by [searchIndex.js](searchIndex.js) which calls `updateIndex()` from [lib/algolia/getRecords.ts](lib/algolia/getRecords.ts)
+
+## Architecture
+
+### Documentation Structure
+
+**File System Routing:**
+- All docs are MDX files in `/docs` directory, organized by platform:
+ - `/docs/android/` - Android SDK docs
+ - `/docs/ios/` - iOS SDK docs
+ - `/docs/javascript/` - JavaScript SDK docs
+ - `/docs/flutter/` - Flutter SDK docs
+ - `/docs/react-native/` - React Native SDK docs
+ - `/docs/server-side/` - Server-side API docs
+ - `/docs/prebuilt/` - Prebuilt UI docs
+ - `/docs/get-started/` - Getting started guides
+ - `/docs/api-reference/` - API reference docs
+
+**MDX Processing Pipeline:**
+The main routing is handled by [pages/[...slug].tsx](pages/[...slug].tsx):
+1. **Static Generation:** Uses `getStaticPaths()` to find all MDX files via [lib/mdxUtils.ts](lib/mdxUtils.ts)
+2. **Content Bundling:** Uses `mdx-bundler` to compile MDX with plugins
+3. **Rendering:** MDX components defined in [components/MDXComponents.tsx](components/MDXComponents.tsx)
+
+**Remark/Rehype Plugin Chain:**
+- `imagePlugin` - Custom image handling
+- `remarkGfm` - GitHub Flavored Markdown
+- `remarkA11yEmoji` - Accessible emoji
+- `remarkCodeHeader` - Code block headers
+- `withTableofContents` - TOC generation
+- `rehypeRaw` - Raw HTML support
+- `mdxPrism` - Syntax highlighting
+
+### FrontMatter Requirements
+
+Every MDX file must include:
+```yaml
+---
+title: Page Title # Used for SEO and page heading
+nav: 14 # Sidebar ordering (can be decimal for insertion)
+---
+```
+
+**Note:** If no `nav` value is specified, it defaults to `Infinity` (appears at the end)
+
+### Navigation System
+
+Navigation is auto-generated from the file system:
+- [lib/mdxUtils.ts](lib/mdxUtils.ts) scans all MDX files
+- `getAllDocs()` extracts frontmatter (title, nav, description)
+- `getNavfromDocs()` builds nested navigation structure using dot notation
+- Sidebar ordering controlled by `nav` frontmatter value
+
+### Custom Components
+
+MDX files have access to auto-imported components from [components/MDXComponents.tsx](components/MDXComponents.tsx):
+
+**Note Components:**
+```mdx
+> Default note (uses blockquote)
+
+Success message
+Error message
+Warning message
+```
+
+**Tabs:**
+```mdx
+
+
+Java code here
+
+
+Kotlin code here
+
+```
+**Important:** Tab IDs must match the Tabs `id` with index suffix
+
+**Other Components:**
+- `` - Embed CodeSandbox
+- `` - Automatic code wrapper with copy button
+- API request components: ``, ``, ``, ``, ``
+- Layout components: ``, ``, ``, ``, ``, ``
+
+### Content Reuse
+
+To avoid duplicating common content:
+1. Create a file in `/common` directory (`.md` or `.mdx`)
+ - Use `.md` for plain Markdown
+ - Use `.mdx` if embedding JSX (escape `<>{}` with backslash or use backticks)
+2. Import as PascalCase: `import Test from '@/common/test.md'`
+3. Use in MDX: ``
+
+### Version Management
+
+Release versions are tracked in [releases.js](releases.js) and automatically updated:
+- `yarn updatereleases` scans `/docs` for latest version numbers
+- Uses [lib/getNewReleases.js](lib/getNewReleases.js) to extract versions from release note files
+- Updates `releases.js` with platform versions and dates
+
+### Redirects
+
+Extensive redirect configuration in [next.config.js](next.config.js):
+- **Rewrites** (lines 33-56): URL normalization for case-sensitive paths
+- **Redirects** (lines 57-1550): Legacy URL redirects, doc reorganization redirects
+- Pattern: Old doc structure redirected to new `how-to-guides` organization
+
+**Key redirect patterns:**
+- `/concepts/` → `/get-started/`
+- Platform-specific `/:platform/v2/guides:path` → `/:platform/v2/get-started:path`
+- Feature docs reorganized under `how-to-guides` with categorization
+
+### Algolia Search
+
+Search powered by Algolia:
+- Index built during `yarn build` via [searchIndex.js](searchIndex.js)
+- Implementation in [lib/algolia/](lib/algolia/)
+- Search UI components in [components/](components/)
+
+## File Naming Conventions
+
+**DO:**
+- Use kebab-case for filenames: `my-feature.mdx`
+- Keep titles in frontmatter, not filename
+
+**DON'T:**
+- Use decimal numbers in filenames: ~~`v-1.3.2.mdx`~~ (use frontmatter `title` instead)
+- Use ampersands: ~~`tips-&-tricks.mdx`~~ (breaks sitemap generation)
+- Use bold in headers: ~~`## **Don't**`~~
+- Use emojis in filenames (but DO use them in content!)
+
+## Adding New Documentation
+
+### To Existing Section
+1. Create MDX file in appropriate `/docs/[platform]/` subdirectory
+2. Add frontmatter with `title` and `nav` value
+3. Folder names become section headers (capitalized, hyphens → spaces)
+
+### New Documentation Version
+To add a new version (e.g., `v3`):
+1. Create `/docs/v3` folder
+2. Create `/pages/v3/index.tsx` with redirect:
+ ```tsx
+ import redirect from '@/lib/redirect';
+ export default redirect('/v3/100ms-v3/basics');
+ ```
+3. Add MDX files following existing structure
+
+## Build Configuration
+
+**Node Version:** `^22` (specified in [package.json:36](package.json#L36))
+
+**Key Dependencies:**
+- `next@12.3.4` - Next.js framework
+- `mdx-bundler@^9.2.1` - MDX compilation
+- `@100mslive/react-ui` & `@100mslive/react-icons` - 100ms UI library
+- `algoliasearch` - Search functionality
+- `shiki` - Syntax highlighting
+
+**Webpack Config:**
+- Raw loader for `.md` files (allows importing as strings)
+- ESM externals set to 'loose' for compatibility
+
+## Styling
+
+All styles use CSS variables (tokens) defined in [styles/theme.css](styles/theme.css):
+- Tokens prefixed with `token` control syntax highlighting
+- Fully customizable theme via CSS variables
+- No CSS-in-JS, plain CSS with variables
+
+## Content Guidelines
+
+From [README.md](README.md):
+
+**DO:**
+- Use emojis in content
+- Maintain header hierarchy (H1 → H2 → H3)
+- Add language attributes to code blocks for syntax highlighting
+- Use https://tableconvert.com/ for Markdown tables
+
+**DON'T:**
+- Use bold in headers
+- Use decimal numbers in filenames
diff --git a/docs/server-side/v2/api-reference/analytics/list-webhook-events.mdx b/docs/server-side/v2/api-reference/analytics/list-webhook-events.mdx
new file mode 100644
index 000000000..a386c2194
--- /dev/null
+++ b/docs/server-side/v2/api-reference/analytics/list-webhook-events.mdx
@@ -0,0 +1,112 @@
+---
+title: List Webhook Events
+nav: 3.64
+---
+
+This API retrieves webhook delivery history including successful and failed webhook events. It can be used to identify failed webhook deliveries and monitor webhook health.
+
+
+
+
+```bash
+curl --location --request GET \
+ 'https://api.100ms.live/v2/analytics/webhooks?start_time=2025-12-28T00:00:00Z&status=failed&limit=20' \
+ --header 'Authorization: Bearer '
+```
+
+
+
+### Allowed Filters
+To be specified as query parameters
+
+| Name | Type | Description | Required |
+| ----------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- |
+| start_time | `string` | Start of time range in RFC3339 format (Default: 24 hours ago). Must be within the last 30 days. | no |
+| end_time | `string` | End of time range in RFC3339 format (Default: Now). Must be within 14 days after `start_time`. | no |
+| room_id | `string` | Unique identifier of the room you wish to fetch webhook events for | no |
+| status | `string` | Filter by delivery status: `all`, `failed`, `success` (Default: `all`) | no |
+| event_names | `string` | Filter by original event types (e.g., `session.open.success`). Can be specified multiple times to include more than one event (for example `event_names=session.open.success&event_names=peer.join.success`). (Default: all events) | no |
+| limit | `int` | Items per page (Default: `50`, Min: `1`, Max: `100`) | no |
+| page | `int` | Page number (Default: `1`) | no |
+
+### Response
+
+
+
+```json
+{
+ "events": [
+ {
+ "event_id": "abc123-def456",
+ "event_name": "reporter.webhook.delivery.failed",
+ "room_id": "room-123",
+ "event_timestamp": "2025-12-29T10:30:00Z",
+ "payload": {
+ "event_data": {
+ "session_id": "63f73bf05223403c9671c5c9",
+ "peer_id": "c8e85ab4-d533-4de0-ba7c-4c58a4de6c74",
+ "user_id": "187a1a92-150f-4506-83b7-d8a1cd716fb0"
+ },
+ "webhook_url": "https://example.com/webhook",
+ "original_event_name": "session.open.success",
+ "error_reason": "timeout"
+ }
+ }
+ ],
+ "pagination": {
+ "current_page": 1,
+ "page_size": 50,
+ "total_pages": 10,
+ "total_items": 500,
+ "has_next": true,
+ "has_prev": false
+ },
+ "summary": {
+ "total_events": 500
+ }
+}
+```
+
+
+
+> **Note:** The `error_reason` field is only present for failed webhook events.
+
+### Supported Event Types
+
+You can filter webhooks by the following event types:
+
+| Event Type | Description |
+| -------------------------- | ------------------------------ |
+| `session.open.success` | Session started |
+| `session.close.success` | Session ended |
+| `peer.join.success` | Peer joined the room |
+| `peer.leave.success` | Peer left the room |
+| `recording.success` | Recording completed |
+| `recording.failed` | Recording failed |
+| `transcription.success` | Transcription completed |
+| `beam.started.success` | RTMP/HLS streaming started |
+| `beam.stopped.success` | RTMP/HLS streaming stopped |
+| `beam.recording.success` | Browser recording completed |
+| `hls.recording.success` | HLS recording completed |
+| `stream.recording.success` | Stream recording completed |
+
+### Error Responses
+
+| Error Code | Message | Description |
+|------------|----------------------------------------------|-------------------------------------------------|
+| 400 | start_time must be within the last 30 days | Invalid time range parameters |
+| 400 | end_time must be within 14 days after start_time | Invalid time range parameters |
+| 403 | insufficient permissions | Unauthorized access or invalid management token |
+| 429 | rate limit exceeded, retry after 45 seconds | Too many requests, rate limit exceeded |
+
+### Why would you use this API?
+
+- **Debug webhook issues:** Retrieve delivery history to analyze failures, identify patterns, and determine when issues occurred
+
+### Postman collection
+
+You can use our Postman collection to start exploring 100ms APIs.
+
+[](https://god.gw.postman.com/run-collection/22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a?action=collection%2Ffork&collection-url=entityId%3D22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a%26entityType%3Dcollection%26workspaceId%3Dd9145dd6-337b-4761-81d6-21a30b4147a2)
+
+Refer to the [Postman guide](/server-side/v2/introduction/postman-guide) to get started with 100ms API collection.
diff --git a/docs/server-side/v2/api-reference/analytics/overview.mdx b/docs/server-side/v2/api-reference/analytics/overview.mdx
index be6b89d8d..6044e7e5a 100644
--- a/docs/server-side/v2/api-reference/analytics/overview.mdx
+++ b/docs/server-side/v2/api-reference/analytics/overview.mdx
@@ -24,12 +24,19 @@ Analytics APIs can be utilized to retrieve events via an HTTP request. By using
- `client.disconnected`
- `client.connect.failed`
+- [List Webhook Events](/server-side/v2/api-reference/analytics/list-webhook-events)
+ - Retrieve webhook delivery history
+
+- [Replay Webhook Events](/server-side/v2/api-reference/analytics/replay-webhook-events)
+ - Replay failed or successful webhooks
+
Event data can be queried up to **last 30 days**.
### What can I build?
- You can use track events to get a better understanding of user activity and build tools around it as explained in [use cases](/server-side/v2/api-reference/analytics/track-events#why-would-you-use-this-api)
- You can use error events to dig deeper into the issues which your users are facing.
+- You can use webhook events to monitor webhook delivery health and replay failed webhooks after recovering from downtime.
diff --git a/docs/server-side/v2/api-reference/analytics/replay-webhook-events.mdx b/docs/server-side/v2/api-reference/analytics/replay-webhook-events.mdx
new file mode 100644
index 000000000..7dfc13ff4
--- /dev/null
+++ b/docs/server-side/v2/api-reference/analytics/replay-webhook-events.mdx
@@ -0,0 +1,220 @@
+---
+title: Replay Webhook Events
+nav: 3.65
+---
+
+Initiates a replay of webhooks that failed to deliver. This is particularly useful when recovering from server downtime or when you need to reprocess events with updated logic.
+
+
+
+
+```bash
+curl --location --request POST \
+ 'https://api.100ms.live/v2/analytics/webhooks/replay' \
+ --header 'Authorization: Bearer ' \
+ --header 'Content-Type: application/json' \
+ --data-raw '{
+ "start_time": "2025-12-28T00:00:00Z",
+ "end_time": "2025-12-29T00:00:00Z",
+ "event_names": ["peer.join.success"]
+ }'
+```
+
+
+
+### Allowed Filters
+To be specified in the JSON request body
+
+| Name | Type | Description | Required |
+| ----------- | ---------- | ------------------------------------------------------------------------------------------------------------------ | -------- |
+| start_time | `string` | Start of time range to replay in RFC3339 format (Default: 24 hours ago). Must be within the last 30 days. | no |
+| end_time | `string` | End of time range to replay in RFC3339 format (Default: Now). Must be after `start_time` but within 14 days of `start_time`. | no |
+| room_id | `string` | Unique identifier of the room you wish to replay webhooks for | no |
+| event_names | `string[]` | Filter by original event types (e.g., `peer.join.success`) | no |
+| limit | `int` | Max webhooks to replay (Default: `50000`, Max: `50000`) | no |
+| offset | `int` | Skip first N webhooks for pagination (Default: `0`) | no |
+| status | `string` | Filter by delivery status: `all`, `failed`, `success` (Default: `failed`) | no |
+
+### Response
+
+
+
+```json
+{
+ "workflow_id": "webhook-replay-customer123-a1b2c3d4",
+ "status": "started",
+ "message": "Webhook replay has been queued for processing",
+ "filters": {
+ "start_time": "2025-12-28T00:00:00Z",
+ "end_time": "2025-12-29T00:00:00Z",
+ "room_id": "",
+ "event_types": ["peer.join.success", "peer.leave.success"]
+ }
+}
+```
+
+
+
+**Status Values:**
+
+| Status | Description |
+| ----------------- | ----------------------------------------- |
+| `started` | Workflow has been started |
+| `already_running` | Identical replay is already in progress |
+
+
+### Supported Event Types
+
+You can filter specific webhooks to replay by the following event types:
+
+| Event Type | Description |
+| -------------------------- | ------------------------------ |
+| `session.open.success` | Session started |
+| `session.close.success` | Session ended |
+| `peer.join.success` | Peer joined the room |
+| `peer.leave.success` | Peer left the room |
+| `recording.success` | Recording completed |
+| `recording.failed` | Recording failed |
+| `transcription.success` | Transcription completed |
+| `beam.started.success` | RTMP/HLS streaming started |
+| `beam.stopped.success` | RTMP/HLS streaming stopped |
+| `beam.recording.success` | Browser recording completed |
+| `hls.recording.success` | HLS recording completed |
+| `stream.recording.success` | Stream recording completed |
+
+### How Replay Works
+
+Once a replay is triggered, the system:
+
+1. Starts a workflow that fetches webhooks from the database in batches
+2. Each webhook is sent to your configured webhook URL with an `is_replay: true` flag
+3. Webhook replay events are sent at a rate of 10 events per second to your endpoint
+4. Maximum 50,000 webhooks are supported per replay request
+5. Duplicate replay requests with identical filters will automatically reuse the existing workflow
+
+> **Note:** Ensure your webhook endpoints are scaled appropriately before triggering the replay API to handle the incoming event rate.
+
+
+### Pagination for Large Replays
+
+For replays with more than 50,000 webhooks, use pagination by making multiple requests with different offsets:
+
+```bash
+# First batch (0-50,000)
+curl --location --request POST \
+ 'https://api.100ms.live/v2/analytics/webhooks/replay' \
+ --header 'Authorization: Bearer ' \
+ --header 'Content-Type: application/json' \
+ --data-raw '{
+ "start_time": "2025-12-28T00:00:00Z",
+ "limit": 50000,
+ "offset": 0
+ }'
+
+# Second batch (50,000-100,000) - Run after first completes
+curl --location --request POST \
+  'https://api.100ms.live/v2/analytics/webhooks/replay' \
+ --header 'Authorization: Bearer ' \
+ --header 'Content-Type: application/json' \
+ --data-raw '{
+ "start_time": "2025-12-28T00:00:00Z",
+ "limit": 50000,
+ "offset": 50000
+ }'
+```
+
+### Error Responses
+
+| Error Code | Message | Description |
+|------------|---------------------------------------------------|-------------------------------------------------|
+| 400 | start_time must be within the last 30 days | Invalid time range parameters |
+| 400 | end_time must be within 14 days after start_time | Invalid time range parameters |
+| 403 | insufficient permissions | Unauthorized access or invalid management token |
+| 404 | workflow not found | The specified workflow ID does not exist |
+| 429 | rate limit exceeded, retry after 45 seconds | Too many requests, rate limit exceeded |
+| 503 | webhook replay service unavailable | Service is temporarily unavailable |
+
+### Why would you use this API?
+
+- **Recover from downtime:** Replay failed webhooks after your server recovers from an outage or maintenance period
+- **Reprocess events:** Replay specific event types following bug fixes, data corrections, or logic updates in your webhook handler
+
+## Check Replay Results
+
+Monitor the status and progress of a webhook replay workflow.
+
+
+
+
+```bash
+curl --location --request GET \
+ 'https://api.100ms.live/v2/analytics/webhooks/replay/webhook-replay-customer123-a1b2c3d4' \
+ --header 'Authorization: Bearer '
+```
+
+
+
+### Allowed Filters
+To be specified as a path parameter in the request URL
+
+| Name | Type | Description | Required |
+| ----------- | -------- | ------------------------------------------------------- | -------- |
+| workflow_id | `string` | The workflow ID returned from POST /webhooks/replay | yes |
+
+### Response
+
+
+
+```json
+{
+ "workflow_id": "webhook-replay-customer123-a1b2c3d4",
+ "status": "completed",
+ "started_at": "2025-12-29T10:00:00Z",
+ "completed_at": "2025-12-29T10:15:00Z",
+ "filters": {
+ "start_time": "2025-12-28T00:00:00Z",
+ "end_time": "2025-12-29T00:00:00Z",
+ "room_id": "",
+ "event_types": ["peer.join.success"],
+ "limit": 50000,
+ "offset": 0
+ },
+ "progress": {
+ "total": 150,
+ "completed": 148,
+ "failed": 2
+ },
+ "result": {
+ "total": 150,
+ "completed": 148,
+ "failed": 2,
+ "failed_events": ["event-id-1", "event-id-2"]
+ }
+}
+```
+
+
+
+**Status Values:**
+
+| Status | Description |
+| --------------------------- | -------------------------------------------- |
+| `Running` | Workflow is currently processing webhooks |
+| `completed` | All webhooks replayed successfully |
+| `completed_with_failures` | Replay finished but some webhooks failed |
+| `failed` | All webhooks failed to replay |
+| `Canceled` | Workflow was canceled |
+| `Terminated` | Workflow was terminated |
+
+### Why would you use this API?
+
+- **Track workflow progress:** Monitor the total number of events queued for replay, completion counts, and failure rates in real-time
+- **Analyze failure statistics:** Retrieve a detailed list of webhook events that failed to replay within the current workflow for troubleshooting
+
+### Postman collection
+
+You can use our Postman collection to start exploring 100ms APIs.
+
+[](https://god.gw.postman.com/run-collection/22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a?action=collection%2Ffork&collection-url=entityId%3D22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a%26entityType%3Dcollection%26workspaceId%3Dd9145dd6-337b-4761-81d6-21a30b4147a2)
+
+Refer to the [Postman guide](/server-side/v2/introduction/postman-guide) to get started with 100ms API collection.
diff --git a/releases.js b/releases.js
index 2833f85d4..6f76bfd59 100644
--- a/releases.js
+++ b/releases.js
@@ -1 +1 @@
-exports.releases = releases = {"Android":{"version":"v2.9.65","date":"August 13, 2024"},"iOS":{"version":"1.16.0","date":"August 13, 2024"},"React Native":{"version":"1.10.9","date":"July 31, 2024"},"Web":{"version":"2024-08-16","date":"August 16, 2024"},"Flutter":{"version":"1.10.5","date":"July 25, 2024"},"Server-side":{"version":"2024-02-28","date":"February 28, 2024"}}
\ No newline at end of file
+exports.releases = releases = {"Android":{"version":"v2.9.79","date":"November 24, 2025"},"iOS":{"version":"1.17.1","date":"November 17, 2025"},"React Native":{"version":"1.12.0","date":"October 28, 2025"},"Web":{"version":"2025-12-04","date":"December 04, 2025"},"Flutter":{"version":"1.11.0","date":"October 29, 2025"},"Server-side":{"version":"2026-01-02","date":"January 02, 2026"}}
\ No newline at end of file
From 1e75ea166dd6cb9f0019ab7a3232ceb29c1efffa Mon Sep 17 00:00:00 2001
From: Ravi theja
Date: Tue, 20 Jan 2026 17:29:19 +0530
Subject: [PATCH 22/28] docs: add release notes for web (#2505)
---
.../v2/release-notes/release-notes.mdx | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/docs/javascript/v2/release-notes/release-notes.mdx b/docs/javascript/v2/release-notes/release-notes.mdx
index c4b7c2365..65aa0e0f9 100644
--- a/docs/javascript/v2/release-notes/release-notes.mdx
+++ b/docs/javascript/v2/release-notes/release-notes.mdx
@@ -14,6 +14,22 @@ description: Release Notes for 100ms.live JavaScript SDK
| @100mslive/hms-noise-cancellation | [](https://badge.fury.io/js/%40100mslive%2Fhms-noise-cancellation) |
| @100mslive/hms-virtual-background | [](https://badge.fury.io/js/%40100mslive%2Fhms-virtual-background) |
+## 2026-01-20
+
+Released: `@100mslive/hms-video-store@0.13.1`, `@100mslive/react-sdk@0.11.1`, `@100mslive/hls-player@0.4.1`, `@100mslive/roomkit-react@0.4.1`
+
+### Added:
+
+- Capture CPU pressure state per sample for better performance monitoring
+- Enhanced track interruption analytics with detailed track information
+- Source stats with fallback mechanism for improved reliability
+- Upgraded Effects SDK to version 3.6.2 for better performance
+
+### Fixed:
+
+- macOS compatibility check in prebuilt binaries
+- Computation of sourceFramesDropped metric in local track stats
+
## 2025-12-04
Released: `@100mslive/hms-video-store@0.13.0`, `@100mslive/react-sdk@0.11.0`, `@100mslive/hls-player@0.4.0`, `@100mslive/roomkit-react@0.4.0`
From ba939189e6b81f3a8328f26c6b4be23fcab38a15 Mon Sep 17 00:00:00 2001
From: cyril-k-031225
Date: Wed, 11 Feb 2026 10:25:29 +0530
Subject: [PATCH 23/28] doc: add release notes for web v0.13.2 (#2507)
---
docs/javascript/v2/release-notes/release-notes.mdx | 14 ++++++++++++++
releases.js | 2 +-
2 files changed, 15 insertions(+), 1 deletion(-)
diff --git a/docs/javascript/v2/release-notes/release-notes.mdx b/docs/javascript/v2/release-notes/release-notes.mdx
index 65aa0e0f9..0ae0e850d 100644
--- a/docs/javascript/v2/release-notes/release-notes.mdx
+++ b/docs/javascript/v2/release-notes/release-notes.mdx
@@ -14,6 +14,20 @@ description: Release Notes for 100ms.live JavaScript SDK
| @100mslive/hms-noise-cancellation | [](https://badge.fury.io/js/%40100mslive%2Fhms-noise-cancellation) |
| @100mslive/hms-virtual-background | [](https://badge.fury.io/js/%40100mslive%2Fhms-virtual-background) |
+## 2026-02-10
+
+Released: `@100mslive/hms-video-store@0.13.2`, `@100mslive/react-sdk@0.11.2`, `@100mslive/hls-player@0.4.2`, `@100mslive/roomkit-react@0.4.2`, `@100mslive/hms-whiteboard@0.1.2`
+
+### Added:
+
+- Enhanced track analytics capturing detailed metadata for local audio and video tracks
+- Online/Offline connection status indicators in whiteboard for better visibility
+- Debug logging for whiteboard errors to aid in troubleshooting
+
+### Fixed:
+
+- Whiteboard connection status handling improvements with better error recovery
+
## 2026-01-20
Released: `@100mslive/hms-video-store@0.13.1`, `@100mslive/react-sdk@0.11.1`, `@100mslive/hls-player@0.4.1`, `@100mslive/roomkit-react@0.4.1`
diff --git a/releases.js b/releases.js
index 6f76bfd59..2a481ab01 100644
--- a/releases.js
+++ b/releases.js
@@ -1 +1 @@
-exports.releases = releases = {"Android":{"version":"v2.9.79","date":"November 24, 2025"},"iOS":{"version":"1.17.1","date":"November 17, 2025"},"React Native":{"version":"1.12.0","date":"October 28, 2025"},"Web":{"version":"2025-12-04","date":"December 04, 2025"},"Flutter":{"version":"1.11.0","date":"October 29, 2025"},"Server-side":{"version":"2026-01-02","date":"January 02, 2026"}}
\ No newline at end of file
+exports.releases = releases = {"Android":{"version":"v2.9.79","date":"November 24, 2025"},"iOS":{"version":"1.17.1","date":"November 17, 2025"},"React Native":{"version":"1.12.0","date":"October 28, 2025"},"Web":{"version":"2026-02-10","date":"February 10, 2026"},"Flutter":{"version":"1.11.0","date":"October 29, 2025"},"Server-side":{"version":"2026-01-02","date":"January 02, 2026"}}
\ No newline at end of file
From 768e4a1860c2d2bf61eeb488ab5cb2b6202dfc35 Mon Sep 17 00:00:00 2001
From: sapta100ms
Date: Wed, 11 Feb 2026 10:28:40 +0530
Subject: [PATCH 24/28] Release notes for android sdk ver 2.9.80 (#2506)
update release notes for Android v2.9.80
Co-authored-by: Ravi theja
---
docs/android/v2/release-notes/release-notes.mdx | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/android/v2/release-notes/release-notes.mdx b/docs/android/v2/release-notes/release-notes.mdx
index 046f2213c..295a96e2d 100644
--- a/docs/android/v2/release-notes/release-notes.mdx
+++ b/docs/android/v2/release-notes/release-notes.mdx
@@ -19,6 +19,10 @@ import AndroidPrebuiltVersionShield from '@/common/android-prebuilt-version-shie
| live.100ms:virtual-background: ||
| live.100ms:hms-noise-cancellation-android: | |
+## v2.9.80 - 2026-02-10
+### Fixed
+* Bug fix for image capture callback reliability during camera operations.
+
## v2.9.79 - 2025-11-24
### Fixed
* Bug fix to handle NPE at room leave.
From ce6ebf1056f0a5ce560733c578d00d71cca5448a Mon Sep 17 00:00:00 2001
From: Anshul
Date: Wed, 11 Feb 2026 10:58:43 +0530
Subject: [PATCH 25/28] fix: path of room apis
---
.../server-side/v2/api-reference/recordings/get-recording.mdx | 4 ++--
.../v2/api-reference/recordings/pause-recording-for-room.mdx | 4 ++--
.../v2/api-reference/recordings/resume-recording-for-room.mdx | 4 ++--
.../v2/api-reference/recordings/stop-recording-by-id.mdx | 4 ++--
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/docs/server-side/v2/api-reference/recordings/get-recording.mdx b/docs/server-side/v2/api-reference/recordings/get-recording.mdx
index 8b6b8ed8b..9b4226835 100644
--- a/docs/server-side/v2/api-reference/recordings/get-recording.mdx
+++ b/docs/server-side/v2/api-reference/recordings/get-recording.mdx
@@ -7,11 +7,11 @@ Use this request to get the recording job object, at any point after it has been
This can be used to fetch recording assets that are generated after the recording has completed.
-
+
```bash
-curl --location --request GET 'https://api.100ms.live/v2/recordings/' \
+curl --location --request GET 'https://api.100ms.live/v2/recordings/' \
--header 'Authorization: Bearer '
```
diff --git a/docs/server-side/v2/api-reference/recordings/pause-recording-for-room.mdx b/docs/server-side/v2/api-reference/recordings/pause-recording-for-room.mdx
index 35ccfe5e1..471723f2a 100644
--- a/docs/server-side/v2/api-reference/recordings/pause-recording-for-room.mdx
+++ b/docs/server-side/v2/api-reference/recordings/pause-recording-for-room.mdx
@@ -7,11 +7,11 @@ Use this API to pause the recording that is running for a room.
The recording can be [resumed](./resume-recording-for-room) later.
-
+
```bash
-curl --location --request POST 'https://api.100ms.live/v2/recordings//pause' \
+curl --location --request POST 'https://api.100ms.live/v2/recordings/room//pause' \
--header 'Authorization: Bearer ' \
```
diff --git a/docs/server-side/v2/api-reference/recordings/resume-recording-for-room.mdx b/docs/server-side/v2/api-reference/recordings/resume-recording-for-room.mdx
index 0528b4cda..03ab4a55d 100644
--- a/docs/server-side/v2/api-reference/recordings/resume-recording-for-room.mdx
+++ b/docs/server-side/v2/api-reference/recordings/resume-recording-for-room.mdx
@@ -5,11 +5,11 @@ nav: 3.147
Use this API to resume the recording that is paused for a room.
-
+
```bash
-curl --location --request POST 'https://api.100ms.live/v2/recordings//resume' \
+curl --location --request POST 'https://api.100ms.live/v2/recordings/room//resume' \
--header 'Authorization: Bearer ' \
```
diff --git a/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx b/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
index 4f320a6bf..03e03dffb 100644
--- a/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
+++ b/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
@@ -5,11 +5,11 @@ nav: 3.145
Use this to stop a specific recording by its unique identifier.
-
+
```bash
-curl --location --request POST 'https://api.100ms.live/v2/recordings//stop' \
+curl --location --request POST 'https://api.100ms.live/v2/recordings//stop' \
--header 'Authorization: Bearer ' \
```
From 303a2e8ab814fdb2db6b7de6568ddd94165c9d08 Mon Sep 17 00:00:00 2001
From: Anshul
Date: Sun, 15 Feb 2026 10:06:43 +0530
Subject: [PATCH 26/28] add peer quality api documentation (#2509)
---
.../v2/api-reference/analytics/overview.mdx | 4 +
.../analytics/peer-quality-stats.mdx | 210 ++++++++++++++++++
2 files changed, 214 insertions(+)
create mode 100644 docs/server-side/v2/api-reference/analytics/peer-quality-stats.mdx
diff --git a/docs/server-side/v2/api-reference/analytics/overview.mdx b/docs/server-side/v2/api-reference/analytics/overview.mdx
index 6044e7e5a..8a154299a 100644
--- a/docs/server-side/v2/api-reference/analytics/overview.mdx
+++ b/docs/server-side/v2/api-reference/analytics/overview.mdx
@@ -30,6 +30,9 @@ Analytics APIs can be utilized to retrieve events via an HTTP request. By using
- [Replay Webhook Events](/server-side/v2/api-reference/analytics/replay-webhook-events)
- Replay failed or successful webhooks
+- [Peer Quality Stats](/server-side/v2/api-reference/analytics/peer-quality-stats)
+ - Time-series quality metrics (bitrate, packet loss, FPS, RTT) for a peer in a session
+
Event data can be queried up to **last 30 days**.
### What can I build?
@@ -37,6 +40,7 @@ Event data can be queried up to **last 30 days**.
- You can use track events to get a better understanding of user activity and build tools around it as explained in [use cases](/server-side/v2/api-reference/analytics/track-events#why-would-you-use-this-api)
- You can use error events to dig deeper into the issues which your users are facing.
- You can use webhook events to monitor webhook delivery health and replay failed webhooks after recovering from downtime.
+- You can use peer quality stats to debug call quality issues, monitor network conditions, and build custom quality dashboards.
diff --git a/docs/server-side/v2/api-reference/analytics/peer-quality-stats.mdx b/docs/server-side/v2/api-reference/analytics/peer-quality-stats.mdx
new file mode 100644
index 000000000..37552b93b
--- /dev/null
+++ b/docs/server-side/v2/api-reference/analytics/peer-quality-stats.mdx
@@ -0,0 +1,210 @@
+---
+title: Peer Quality Stats
+nav: 3.67
+---
+
+Peer Quality Stats API provides time-series quality metrics for a specific peer in a session. Use this to analyze publishing and subscribing performance, including bitrate, packet loss, FPS, round-trip time, and more.
+
+This is useful for debugging call quality issues, monitoring network conditions, and building custom quality dashboards.
+
+This API is not real-time. Quality stats data is only available after the session has ended.
+
+
+
+
+```bash
+curl --location --request GET \
+  'https://api.100ms.live/v2/analytics/peer-stats?peer_id=<peer_id>&session_id=<session_id>' \
+  --header 'Authorization: Bearer <management_token>'
+```
+
+
+
+## Query Parameters
+
+| Name | Type | Description | Required |
+| ---------- | -------- | ------------------------------------------------------------------ | -------- |
+| peer_id | `string` | Unique identifier of the peer/participant | yes |
+| session_id | `string` | Unique identifier of the session | yes |
+
+## Response Object
+
+| Attribute | Type | Description |
+| ----------- | -------- | -------------------------------------------------------------- |
+| peer_id | `string` | Unique identifier of the peer |
+| session_id | `string` | Unique identifier of the session |
+| room_id | `string` | Unique identifier of the room |
+| publisher | `object` | Contains `video` and `audio` arrays with publish-side metrics |
+| subscriber | `object` | Contains `video` and `audio` arrays with subscribe-side metrics|
+
+### Publisher Video Object
+
+| Attribute | Type | Description |
+| ----------- | -------- | --------------------------------------------------------------------------- |
+| track_id | `string` | UUID of the track |
+| type | `string` | Track type: `regular` or `screen` |
+| rid | `string \| null` | Simulcast layer: `h` (high), `m` (medium), `l` (low), or `null` |
+| time_series | `object` | Time-series metrics for the track |
+
+**Publisher video `time_series` fields:**
+
+| Field | Type | Description |
+| ------------------- | ---------- | --------------------------------------- |
+| timestamps | `number[]` | Unix timestamps in milliseconds |
+| bitrate | `number[]` | Bitrate in bits per second |
+| fps | `number[]` | Frames per second |
+| packet_loss_percent | `number[]` | Packet loss as a percentage |
+| rtt_ms | `number[]` | Round-trip time in milliseconds |
+
+### Publisher Audio Object
+
+| Attribute | Type | Description |
+| ----------- | -------- | --------------------------------- |
+| track_id | `string` | UUID of the track |
+| time_series | `object` | Time-series metrics for the track |
+
+**Publisher audio `time_series` fields:**
+
+| Field | Type | Description |
+| ------------------- | ---------- | --------------------------------------- |
+| timestamps | `number[]` | Unix timestamps in milliseconds |
+| bitrate | `number[]` | Bitrate in bits per second |
+| packet_loss_percent | `number[]` | Packet loss as a percentage |
+| rtt_ms | `number[]` | Round-trip time in milliseconds |
+| jitter_ms | `number[]` | Jitter in milliseconds |
+
+### Subscriber Video Object
+
+| Attribute | Type | Description |
+| ----------- | -------- | --------------------------------------- |
+| track_id | `string` | UUID of the track |
+| type | `string` | Track type: `regular` or `screen` |
+| time_series | `object` | Time-series metrics for the track |
+
+**Subscriber video `time_series` fields:**
+
+| Field | Type | Description |
+| ------------ | ---------------- | --------------------------------------- |
+| timestamps | `number[]` | Unix timestamps in milliseconds |
+| bitrate      | `(number\|null)[]` | Bitrate in bits per second, or `null`   |
+| fps | `number[]` | Frames per second |
+| freeze_count | `number[]` | Number of video freezes |
+
+### Subscriber Audio Object
+
+| Attribute | Type | Description |
+| ----------- | -------- | --------------------------------- |
+| track_id | `string` | UUID of the track |
+| time_series | `object` | Time-series metrics for the track |
+
+**Subscriber audio `time_series` fields:**
+
+| Field | Type | Description |
+| ------------------- | ---------------- | -------------------------------------------- |
+| timestamps | `number[]` | Unix timestamps in milliseconds |
+| bitrate             | `(number\|null)[]` | Bitrate in bits per second, or `null`        |
+| concealed_samples | `number[]` | Number of concealed (interpolated) samples |
+| packet_loss_percent | `number[]` | Packet loss as a percentage |
+
+
+
+```json
+{
+ "peer_id": "1169b4b7-68c2-4d39-8568-5618433958ac",
+ "session_id": "6977b3f3cdd1e423f8b2cxxx",
+ "room_id": "6977b3f3cdd1e423f8b2xxxx",
+ "publisher": {
+ "video": [
+ {
+ "track_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
+ "type": "regular",
+ "rid": "h",
+ "time_series": {
+ "timestamps": [1738843560000, 1738843570000],
+ "bitrate": [2500000, 2480000],
+ "fps": [30, 30],
+ "packet_loss_percent": [0.3, 0.2],
+ "rtt_ms": [45, 47]
+ }
+ },
+ {
+ "track_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
+ "type": "regular",
+ "rid": "m",
+ "time_series": {
+ "timestamps": [1738843560000],
+ "bitrate": [1200000],
+ "fps": [30],
+ "packet_loss_percent": [0.15],
+ "rtt_ms": [45]
+ }
+ },
+ {
+ "track_id": "b2c3d4e5-f6a7-8901-bcde-f12345678901",
+ "type": "screen",
+ "rid": null,
+ "time_series": {
+ "timestamps": [1738843560000],
+ "bitrate": [800000],
+ "fps": [5],
+ "packet_loss_percent": [0.1],
+ "rtt_ms": [46]
+ }
+ }
+ ],
+ "audio": [
+ {
+ "track_id": "c3d4e5f6-a7b8-9012-cdef-123456789012",
+ "time_series": {
+ "timestamps": [1738843560000],
+ "bitrate": [64000],
+ "packet_loss_percent": [0.1],
+ "rtt_ms": [45],
+ "jitter_ms": [2.1]
+ }
+ }
+ ]
+ },
+ "subscriber": {
+ "video": [
+ {
+ "track_id": "4334da7c-1d73-4e19-9d63-4b0571f5acd4",
+ "type": "regular",
+ "time_series": {
+ "timestamps": [1738843560000],
+ "bitrate": [null],
+ "fps": [15],
+ "freeze_count": [0]
+ }
+ }
+ ],
+ "audio": [
+ {
+ "track_id": "fbc9c8f8-70ea-4d24-9699-12bf3eba94d8",
+ "time_series": {
+ "timestamps": [1738843560000],
+ "bitrate": [null],
+ "concealed_samples": [0],
+ "packet_loss_percent": [0.2]
+ }
+ }
+ ]
+ }
+}
+```
+
+
+
+## Notes
+
+- **Authentication**: Requires a management token in the `Authorization: Bearer <management_token>` header.
+- **Empty Data**: Arrays will be empty `[]` if no data is available for a given metric.
+- **Simulcast**: For publisher video tracks, the same `track_id` may appear multiple times with different `rid` values (`h`/`m`/`l`) representing different simulcast quality layers.
+
+## Postman collection
+
+You can use our Postman collection to start exploring 100ms APIs.
+
+[](https://god.gw.postman.com/run-collection/22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a?action=collection%2Ffork&collection-url=entityId%3D22726679-47dcd974-29d5-4965-a35b-bf9b74a8b25a%26entityType%3Dcollection%26workspaceId%3Dd9145dd6-337b-4761-81d6-21a30b4147a2)
+
+Refer to the [Postman guide](/server-side/v2/introduction/postman-guide) to get started with 100ms API collection.
From 95338b3538fefd198445fbfe3590d5cb9f9b571e Mon Sep 17 00:00:00 2001
From: Ravi theja
Date: Tue, 24 Feb 2026 14:11:23 +0530
Subject: [PATCH 27/28] docs: add JavaScript implementation to captions page
(#2510)
Add JS/React tabs to the captions documentation showing both the
vanilla JS approach (hmsNotifications.onNotification) and the React
hook approach (useTranscript) for receiving transcript messages.
---
.../set-up-video-conferencing/captions.mdx | 197 +++++++++++++-----
1 file changed, 146 insertions(+), 51 deletions(-)
diff --git a/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx b/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
index 8b9b3389d..406fb93e1 100644
--- a/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
+++ b/docs/javascript/v2/how-to-guides/set-up-video-conferencing/captions.mdx
@@ -11,84 +11,179 @@ nav: 1.24
## How to implement closed captioning?
-The `useTranscript` hook is implemented with the `onTranscript` callback as shown below:
+Each transcript entry has the following structure:
```ts
- export interface HMSTranscript {
- // start time in second
- start: number;
- // end time in seconds
- end: number;
- // peer_id of the speaker
- peer_id: string;
- // transcription will continue to update the transcript until you receive final keyword
- final: boolean;
- // closed caption
- transcript: string;
- }
+interface HMSTranscript {
+ start: number; // start time in seconds
+ end: number; // end time in seconds
+ peer_id: string; // peer_id of the speaker
+ final: boolean; // true when the transcript segment is finalized
+ transcript: string; // the caption text
+}
+```
+
+The transcription engine sends interim results as the speaker talks, then a final result once the segment is complete. Interim results update the same segment in place, while final results indicate that a new segment will follow.
+
+
+
+
+
+In plain JavaScript, transcript data arrives as `NEW_MESSAGE` notifications of type `hms_transcript`. Subscribe via `hmsNotifications.onNotification`:
+
+```js
+import { HMSNotificationTypes } from '@100mslive/hms-video-store';
- export interface useHMSTranscriptInput {
- onTranscript?: (data: HMSTranscript[]) => void;
- handleError?: hooksErrHandler;
+const unsubscribe = hmsNotifications.onNotification((notification) => {
+ const msg = notification.data;
+ if (msg && msg.type === 'hms_transcript') {
+ const parsed = JSON.parse(msg.message);
+ const transcripts = parsed.results; // HMSTranscript[]
+
+ transcripts.forEach((entry) => {
+ console.log(
+ `${entry.peer_id}: ${entry.transcript} (final: ${entry.final})`
+ );
+ });
}
+}, HMSNotificationTypes.NEW_MESSAGE);
+
+// call unsubscribe() when you no longer need transcript updates
+```
+
+To resolve the `peer_id` to a display name, use the store:
- export const useTranscript = ({ onTranscript, handleError = logErrorHandler }: useHMSTranscriptInput);
+```js
+import { selectPeerNameByID } from '@100mslive/hms-video-store';
+const peerName = hmsStore.getState(selectPeerNameByID(entry.peer_id));
```
+
+
+
+
+Use the `useTranscript` hook with the `onTranscript` callback:
+
+```jsx
+import { useTranscript } from '@100mslive/react-sdk';
+
+function CaptionsViewer() {
+ useTranscript({
+ onTranscript: (transcripts) => {
+ // transcripts is HMSTranscript[]
+ transcripts.forEach((entry) => {
+ console.log(
+ `${entry.peer_id}: ${entry.transcript} (final: ${entry.final})`
+ );
+ });
+ },
+ });
+
+ return null;
+}
+```
+
+
+
## How can you check if closed captions are enabled in a room?
-```ts
+
+
+
+
+```js
+import { selectIsTranscriptionEnabled } from '@100mslive/hms-video-store';
+
+// read once
+const isCaptionEnabled = hmsStore.getState(selectIsTranscriptionEnabled);
+
+// or subscribe to changes
+hmsStore.subscribe((enabled) => {
+ console.log('Captions enabled:', enabled);
+}, selectIsTranscriptionEnabled);
+```
+
+
+
+
+
+```jsx
import { selectIsTranscriptionEnabled, useHMSStore } from '@100mslive/react-sdk';
-// use this to check if caption is enabled for your room.
-const isCaptionPresent: boolean = useHMSStore(selectIsTranscriptionEnabled);
+
+function CaptionStatus() {
+ const isCaptionEnabled = useHMSStore(selectIsTranscriptionEnabled);
+ return {isCaptionEnabled ? 'Captions ON' : 'Captions OFF'};
+}
```
+
+
## How to toggle closed captions on or off?
Closed captions can be dynamically enabled or disabled at runtime within a given room, depending on user requirements. This capability helps minimize unnecessary usage costs by ensuring that captions are enabled only when explicitly needed by the user(s).
-```ts
-// Currently 100ms supports closed captions type mode
-export declare enum HMSTranscriptionMode {
- CAPTION = 'caption'
-}
+### Check permission
-export interface TranscriptionConfig {
- mode: HMSTranscriptionMode;
-}
+Before starting or stopping transcription, verify that the local peer has the required permission:
+
+
+
+
+
+```js
+import {
+ HMSTranscriptionMode,
+ selectIsTranscriptionAllowedByMode
+} from '@100mslive/hms-video-store';
+
+const isTranscriptionAllowed = hmsStore.getState(
+ selectIsTranscriptionAllowedByMode(HMSTranscriptionMode.CAPTION)
+);
+```
+
+
+
+
+
+```jsx
+import {
+ HMSTranscriptionMode,
+ selectIsTranscriptionAllowedByMode,
+ useHMSStore
+} from '@100mslive/react-sdk';
-// admin/host role need to startTranscription if he had the access, here is how you will check if you had access to start transcription
const isTranscriptionAllowed = useHMSStore(
selectIsTranscriptionAllowedByMode(HMSTranscriptionMode.CAPTION)
);
```
-Use `hmsActions.startTranscription()` method to start the closed captions.
+
-```ts
- async startCaption() {
- try {
- await hmsActions.startTranscription({
- mode: HMSTranscriptionMode.CAPTION,
- });
- } catch(err) {
- console.log(err);
- }
- }
+### Start captions
+Use `hmsActions.startTranscription()` to enable closed captions for the room:
+
+```js
+try {
+ await hmsActions.startTranscription({
+ mode: HMSTranscriptionMode.CAPTION,
+ });
+} catch (err) {
+ console.error('Failed to start captions:', err);
+}
```
-Use `hmsActions.stopTranscription()` method to stop closed captions.
+### Stop captions
-```ts
- async stopCaption() {
- try {
- await hmsActions.stopTranscription({
- mode: HMSTranscriptionMode.CAPTION,
- });
- } catch(err) {
- console.log(err);
- }
- }
+Use `hmsActions.stopTranscription()` to disable closed captions:
+
+```js
+try {
+ await hmsActions.stopTranscription({
+ mode: HMSTranscriptionMode.CAPTION,
+ });
+} catch (err) {
+ console.error('Failed to stop captions:', err);
+}
```
From 0529c28995107fed7f55362e93c0652a3fea9027 Mon Sep 17 00:00:00 2001
From: Ravi theja
Date: Thu, 5 Mar 2026 10:46:40 +0530
Subject: [PATCH 28/28] Fix formatting in stop-recording-by-id documentation
---
.../v2/api-reference/recordings/stop-recording-by-id.mdx | 1 +
1 file changed, 1 insertion(+)
diff --git a/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx b/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
index 03e03dffb..c73b7086d 100644
--- a/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
+++ b/docs/server-side/v2/api-reference/recordings/stop-recording-by-id.mdx
@@ -40,3 +40,4 @@ curl --location --request POST 'https://api.100ms.live/v2/recordings/
+