diff --git a/packages/react-native-sdk/package.json b/packages/react-native-sdk/package.json index 6caad7742f..4602a1c0e5 100644 --- a/packages/react-native-sdk/package.json +++ b/packages/react-native-sdk/package.json @@ -65,7 +65,7 @@ "@react-native-firebase/messaging": ">=17.5.0", "@stream-io/noise-cancellation-react-native": ">=0.1.0", "@stream-io/react-native-callingx": ">=0.1.0", - "@stream-io/react-native-webrtc": ">=137.1.3", + "@stream-io/react-native-webrtc": ">=137.2.0", "@stream-io/video-filters-react-native": ">=0.1.0", "expo": ">=47.0.0", "expo-build-properties": "*", @@ -127,7 +127,7 @@ "@react-native/babel-preset": "^0.81.5", "@stream-io/noise-cancellation-react-native": "workspace:^", "@stream-io/react-native-callingx": "workspace:^", - "@stream-io/react-native-webrtc": "137.1.3", + "@stream-io/react-native-webrtc": "137.2.0", "@stream-io/video-filters-react-native": "workspace:^", "@testing-library/jest-native": "^5.4.3", "@testing-library/react-native": "13.3.3", diff --git a/packages/react-native-sdk/src/contexts/BackgroundFilters.tsx b/packages/react-native-sdk/src/contexts/BackgroundFilters.tsx index 2464404543..e2461d2baa 100644 --- a/packages/react-native-sdk/src/contexts/BackgroundFilters.tsx +++ b/packages/react-native-sdk/src/contexts/BackgroundFilters.tsx @@ -2,6 +2,7 @@ import React, { type PropsWithChildren, useCallback, useContext, + useEffect, useMemo, useRef, useState, @@ -76,6 +77,10 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => { const isBackgroundBlurRegisteredRef = useRef(false); const isVideoBlurRegisteredRef = useRef(false); const registeredImageFiltersSetRef = useRef(new Set()); + // The currently applied native filter name. Used to reapply on track + // replacement, and as a staleness signal so a later apply/disable can + // invalidate an in-flight apply() call. 
+ const lastAppliedFilterNameRef = useRef(null); const [currentBackgroundFilter, setCurrentBackgroundFilter] = useState(); @@ -85,16 +90,19 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => { if (!isSupported) { return; } - if (!isBackgroundBlurRegisteredRef.current) { - await videoFiltersModule?.registerBackgroundBlurVideoFilters(); - isBackgroundBlurRegisteredRef.current = true; - } let filterName = 'BackgroundBlurMedium'; if (blurIntensity === 'heavy') { filterName = 'BackgroundBlurHeavy'; } else if (blurIntensity === 'light') { filterName = 'BackgroundBlurLight'; } + // Set before awaiting so a later apply/disable can mark this call stale. + lastAppliedFilterNameRef.current = filterName; + if (!isBackgroundBlurRegisteredRef.current) { + await videoFiltersModule?.registerBackgroundBlurVideoFilters(); + if (lastAppliedFilterNameRef.current !== filterName) return; + isBackgroundBlurRegisteredRef.current = true; + } call?.tracer.trace('backgroundFilters.apply', filterName); (call?.camera.state.mediaStream as MediaStream | undefined) ?.getVideoTracks() @@ -111,16 +119,18 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => { if (!isSupported) { return; } - if (!isVideoBlurRegisteredRef.current) { - await videoFiltersModule?.registerBlurVideoFilters(); - isVideoBlurRegisteredRef.current = true; - } let filterName = 'BlurMedium'; if (blurIntensity === 'heavy') { filterName = 'BlurHeavy'; } else if (blurIntensity === 'light') { filterName = 'BlurLight'; } + lastAppliedFilterNameRef.current = filterName; + if (!isVideoBlurRegisteredRef.current) { + await videoFiltersModule?.registerBlurVideoFilters(); + if (lastAppliedFilterNameRef.current !== filterName) return; + isVideoBlurRegisteredRef.current = true; + } call?.tracer.trace('videoFilters.apply', filterName); (call?.camera.state.mediaStream as MediaStream | undefined) ?.getVideoTracks() @@ -139,12 +149,14 @@ export const BackgroundFiltersProvider = ({ 
children }: PropsWithChildren) => { } const source = Image.resolveAssetSource(imageSource); const imageUri = source.uri; + const filterName = `VirtualBackground-${imageUri}`; + lastAppliedFilterNameRef.current = filterName; const registeredImageFiltersSet = registeredImageFiltersSetRef.current; if (!registeredImageFiltersSet.has(imageUri)) { await videoFiltersModule?.registerVirtualBackgroundFilter(imageSource); + if (lastAppliedFilterNameRef.current !== filterName) return; registeredImageFiltersSetRef.current.add(imageUri); } - const filterName = `VirtualBackground-${imageUri}`; call?.tracer.trace('backgroundFilters.apply', filterName); (call?.camera.state.mediaStream as MediaStream | undefined) ?.getVideoTracks() @@ -161,6 +173,8 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => { return; } call?.tracer.trace('backgroundFilters.disableAll', null); + // Clearing the ref invalidates any in-flight apply — its stale check will bail. + lastAppliedFilterNameRef.current = null; (call?.camera.state.mediaStream as MediaStream | undefined) ?.getVideoTracks() .forEach((track) => { @@ -169,6 +183,38 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => { setCurrentBackgroundFilter(undefined); }, [call]); + // Reapplies the filter on track replacement (flip, enable-after-disable). + // Releases native filter state on unmount / call change. 
+ useEffect(() => { + if (!call || !isSupported) return; + const registeredImageFiltersSet = registeredImageFiltersSetRef.current; + const subscription = call.camera.state.mediaStream$.subscribe(() => { + const name = lastAppliedFilterNameRef.current; + if (!name) return; + (call.camera.state.mediaStream as MediaStream | undefined) + ?.getVideoTracks() + .forEach((track) => { + track._setVideoEffect(name); + }); + }); + return () => { + subscription.unsubscribe(); + (call.camera.state.mediaStream as MediaStream | undefined) + ?.getVideoTracks() + .forEach((track) => { + track._setVideoEffect(null); + }); + // Drop native processor refs so they can be deallocated. Otherwise the + // ProcessorProvider registry holds them for the app's lifetime. + videoFiltersModule?.unregisterAllFilters?.().catch(() => {}); + lastAppliedFilterNameRef.current = null; + isBackgroundBlurRegisteredRef.current = false; + isVideoBlurRegisteredRef.current = false; + registeredImageFiltersSet.clear(); + setCurrentBackgroundFilter(undefined); + }; + }, [call]); + const value = useMemo( () => ({ currentBackgroundFilter, diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/VideoFiltersReactNativeModule.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/VideoFiltersReactNativeModule.kt index ec2dc082b9..b1efdf4be4 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/VideoFiltersReactNativeModule.kt +++ b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/VideoFiltersReactNativeModule.kt @@ -10,6 +10,10 @@ import com.streamio.videofiltersreactnative.factories.* class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) { + // Names we add to the global ProcessorProvider, so unregisterAllFilters can + // release them. 
Otherwise factories accumulate for the app's lifetime. + private val registeredNames = mutableSetOf() + override fun getName(): String { return NAME } @@ -36,15 +40,18 @@ class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) : "BackgroundBlurHeavy", BackgroundBlurFactory(BlurIntensity.HEAVY) ) + registeredNames.addAll(listOf("BackgroundBlurLight", "BackgroundBlurMedium", "BackgroundBlurHeavy")) promise.resolve(true) } @ReactMethod fun registerVirtualBackgroundFilter(backgroundImageUrlString: String, promise: Promise) { + val name = "VirtualBackground-$backgroundImageUrlString" ProcessorProvider.addProcessor( - "VirtualBackground-$backgroundImageUrlString", + name, VirtualBackgroundFactory(reactApplicationContext, backgroundImageUrlString) ) + registeredNames.add(name) promise.resolve(true) } @@ -53,9 +60,19 @@ class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) : ProcessorProvider.addProcessor("BlurLight", VideoBlurFactory(VideoBlurIntensity.LIGHT)) ProcessorProvider.addProcessor("BlurMedium", VideoBlurFactory(VideoBlurIntensity.MEDIUM)) ProcessorProvider.addProcessor("BlurHeavy", VideoBlurFactory(VideoBlurIntensity.HEAVY)) + registeredNames.addAll(listOf("BlurLight", "BlurMedium", "BlurHeavy")) + promise.resolve(true) + } + + @ReactMethod + fun unregisterAllFilters(promise: Promise) { + for (name in registeredNames) { + ProcessorProvider.removeProcessor(name) + } + registeredNames.clear() promise.resolve(true) } - + companion object { const val NAME = "VideoFiltersReactNative" } diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/BitmapVideoFilter.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/BitmapVideoFilter.kt index cd7c001f96..8cbabc9983 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/BitmapVideoFilter.kt +++ 
b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/BitmapVideoFilter.kt @@ -8,4 +8,6 @@ import android.graphics.Bitmap */ abstract class BitmapVideoFilter { abstract fun applyFilter(videoFrameBitmap: Bitmap) + + open fun close() {} } diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/VideoFrameWithBitmapFilter.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/VideoFrameWithBitmapFilter.kt index 6025ce0b33..b3f7e23b65 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/VideoFrameWithBitmapFilter.kt +++ b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/VideoFrameWithBitmapFilter.kt @@ -18,6 +18,7 @@ import org.webrtc.YuvConverter class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVideoFilter) : VideoFrameProcessor { private val yuvConverter = YuvConverter() + private val yuvFrame = YuvFrame() private var inputWidth = 0 private var inputHeight = 0 private var inputBuffer: VideoFrame.TextureBuffer? = null @@ -25,9 +26,8 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid private val textures = IntArray(1) private var inputFrameBitmap: Bitmap? 
= null - private val bitmapVideoFilter by lazy { - bitmapVideoFilterFunc.invoke() - } + private val bitmapVideoFilterLazy = lazy { bitmapVideoFilterFunc.invoke() } + private val bitmapVideoFilter: BitmapVideoFilter by bitmapVideoFilterLazy init { GLES20.glGenTextures(1, textures, 0) @@ -35,7 +35,7 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid override fun process(frame: VideoFrame, surfaceTextureHelper: SurfaceTextureHelper): VideoFrame { // Step 1: Video Frame to Bitmap - val inputFrameBitmap = YuvFrame.bitmapFromVideoFrame(frame) ?: return frame + val inputFrameBitmap = yuvFrame.bitmapFromVideoFrame(frame) ?: return frame // Prepare helpers (runs only once or if the dimensions change) initialize( @@ -66,9 +66,6 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid } private fun initialize(width: Int, height: Int, textureHelper: SurfaceTextureHelper) { - // TODO: temporarily disabled due to crash: java.lang.IllegalStateException: release() called on an object with refcount < 1 -// yuvBuffer?.release() - if (this.inputWidth != width || this.inputHeight != height) { Log.d(TAG, "initialize - width: $width height: $height") this.inputWidth = width @@ -92,6 +89,25 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid } } + // Runs on the GL thread serialized with process() — inline cleanup is safe. + // Always delete the texture: glGenTextures in init can return a valid id even + // before initialize() runs, so enable-then-disable would leak it otherwise. 
+ override fun dispose() { + if (bitmapVideoFilterLazy.isInitialized()) { + bitmapVideoFilter.close() + } + yuvFrame.close() + inputFrameBitmap?.recycle() + inputFrameBitmap = null + yuvConverter.release() + inputBuffer?.release() + inputBuffer = null + if (textures[0] != 0) { + GLES20.glDeleteTextures(1, intArrayOf(textures[0]), 0) + textures[0] = 0 + } + } + companion object { private const val TAG = "VideoFrameProcessorWithBitmapFilter" } diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/YuvFrame.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/YuvFrame.kt index 691bd4c89e..ddd5c81abe 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/YuvFrame.kt +++ b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/common/YuvFrame.kt @@ -9,9 +9,7 @@ import io.github.crow_misia.libyuv.RotateMode import io.github.crow_misia.libyuv.RowStride import org.webrtc.VideoFrame -object YuvFrame { - private const val TAG = "YuvFrame" - +class YuvFrame { private lateinit var webRtcI420Buffer: VideoFrame.I420Buffer private lateinit var libYuvI420Buffer: I420Buffer private var libYuvRotatedI420Buffer: I420Buffer? = null @@ -94,4 +92,16 @@ object YuvFrame { webRtcI420Buffer.release() // Rest of buffers are closed in the methods above } + + /** Frees the libyuv buffers. GC alone won't reclaim their off-heap memory. Idempotent. 
*/ + fun close() { + libYuvRotatedI420Buffer?.close() + libYuvRotatedI420Buffer = null + libYuvAbgrBuffer?.close() + libYuvAbgrBuffer = null + } + + companion object { + private const val TAG = "YuvFrame" + } } diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/BackgroundBlurFactory.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/BackgroundBlurFactory.kt index 50b4fe0712..a75e5f9440 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/BackgroundBlurFactory.kt +++ b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/BackgroundBlurFactory.kt @@ -149,6 +149,15 @@ private class BlurredBackgroundVideoFilter( blurredBackgroundBitmap.recycle() } + // Free the ML Kit segmenter and cached bitmaps held by the filter. + override fun close() { + segmenter.close() + backgroundMaskBitmap?.recycle() + backgroundMaskBitmap = null + downScaledBackgroundBitmap?.recycle() + downScaledBackgroundBitmap = null + } + private fun maybeInit(videoFrameBitmap: Bitmap, mask: SegmentationMask) { var createScale = false if (currentFrameWidth != videoFrameBitmap.width || currentFrameHeight != videoFrameBitmap.height) { diff --git a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/VirtualBackgroundFactory.kt b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/VirtualBackgroundFactory.kt index bb64a7ab11..c04564151c 100644 --- a/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/VirtualBackgroundFactory.kt +++ b/packages/video-filters-react-native/android/src/main/java/com/streamio/videofiltersreactnative/factories/VirtualBackgroundFactory.kt @@ -44,15 +44,9 @@ class VirtualBackgroundFactory( 
override fun build(): VideoFrameProcessor { return VideoFrameProcessorWithBitmapFilter { - VirtualBackgroundVideoFilter( - reactContext, backgroundImageUrlString, foregroundThreshold - ) + VirtualBackgroundVideoFilter(reactContext, backgroundImageUrlString, foregroundThreshold) } } - - companion object { - private const val TAG = "VirtualBackgroundFactory" - } } /** @@ -63,10 +57,55 @@ class VirtualBackgroundFactory( */ @Keep private class VirtualBackgroundVideoFilter( - reactContext: ReactApplicationContext, - backgroundImageUrlString: String, + private val reactContext: ReactApplicationContext, + private val backgroundImageUrlString: String, foregroundThreshold: Double = DEFAULT_FOREGROUND_THRESHOLD, ) : BitmapVideoFilter() { + // Loaded off-thread so a slow URL doesn't block the capture thread. + // Frames arriving before the load finishes fall through unfiltered. + @Volatile + private var virtualBackgroundBitmap: Bitmap? = null + + // Guards against the load thread writing virtualBackgroundBitmap after close(). + @Volatile + private var isClosed = false + + init { + Thread { loadBackgroundImage() }.start() + } + + private fun loadBackgroundImage() { + val bitmap: Bitmap? = try { + val uri = Uri.parse(backgroundImageUrlString) + if (uri.scheme == null) { // this is a local image + val drawableId = ResourceDrawableIdHelper.getInstance() + .getResourceDrawableId(reactContext, backgroundImageUrlString) + BitmapFactory.decodeResource(reactContext.resources, drawableId) + } else { + val connection = URL(backgroundImageUrlString).openConnection().apply { + connectTimeout = REMOTE_IMAGE_TIMEOUT_MS + readTimeout = REMOTE_IMAGE_TIMEOUT_MS + } + connection.getInputStream().use { stream -> + BitmapFactory.decodeStream(stream) + } + } + } catch (e: IOException) { + // URLs may carry signed-access query tokens; log only the host. 
+ val host = Uri.parse(backgroundImageUrlString).host ?: "local" + Log.e(TAG, "cant get bitmap for image (host=$host)", e) + null + } + + synchronized(this) { + if (isClosed) { + bitmap?.recycle() + return + } + virtualBackgroundBitmap = bitmap + } + } + private val options = SelfieSegmenterOptions.Builder().setDetectorMode(SelfieSegmenterOptions.STREAM_MODE) .enableRawSizeMask().build() @@ -93,23 +132,6 @@ private class VirtualBackgroundVideoFilter( } - private val virtualBackgroundBitmap by lazy { - try { - val uri = Uri.parse(backgroundImageUrlString) - if (uri.scheme == null) { // this is a local image - val drawableId = ResourceDrawableIdHelper.getInstance() - .getResourceDrawableId(reactContext, backgroundImageUrlString) - BitmapFactory.decodeResource(reactContext.resources, drawableId) - } else { - val url = URL(backgroundImageUrlString) - BitmapFactory.decodeStream(url.openConnection().getInputStream()) - } - } catch (e: IOException) { - Log.e(TAG, "cant get bitmap for image url: $backgroundImageUrlString", e) - null - } - } - private val foregroundPaint by lazy { // destination - video frame // source - black mask bitmap of person cutout @@ -184,6 +206,22 @@ private class VirtualBackgroundVideoFilter( canvas.drawBitmap(scaledVirtualBackgroundBitmap!!, 0f, 0f, backgroundPaint) } + // Free the ML Kit segmenter and cached bitmaps held by the filter. 
+ override fun close() { + synchronized(this) { + isClosed = true + virtualBackgroundBitmap?.recycle() + virtualBackgroundBitmap = null + } + segmenter.close() + scaledVirtualBackgroundBitmap?.recycle() + scaledVirtualBackgroundBitmap = null + foregroundMaskBitmap?.recycle() + foregroundMaskBitmap = null + scaledForegroundBitmap?.recycle() + scaledForegroundBitmap = null + } + private fun scaleVirtualBackgroundBitmap(bitmap: Bitmap, targetHeight: Int): Bitmap { val scale = targetHeight.toFloat() / bitmap.height return ensureAlpha( @@ -260,6 +298,7 @@ private class VirtualBackgroundVideoFilter( companion object { private const val TAG = "VirtualBackgroundVideoFilter" + private const val REMOTE_IMAGE_TIMEOUT_MS = 10_000 } } diff --git a/packages/video-filters-react-native/ios/VideoFiltersReactNative.mm b/packages/video-filters-react-native/ios/VideoFiltersReactNative.mm index 04957fc044..e11598ab3c 100644 --- a/packages/video-filters-react-native/ios/VideoFiltersReactNative.mm +++ b/packages/video-filters-react-native/ios/VideoFiltersReactNative.mm @@ -12,6 +12,9 @@ @interface RCT_EXTERN_MODULE(VideoFiltersReactNative, NSObject) RCT_EXTERN_METHOD(registerBlurVideoFilters:(RCTPromiseResolveBlock)resolve withRejecter:(RCTPromiseRejectBlock)reject) +RCT_EXTERN_METHOD(unregisterAllFilters:(RCTPromiseResolveBlock)resolve + withRejecter:(RCTPromiseRejectBlock)reject) + + (BOOL)requiresMainQueueSetup { return NO; diff --git a/packages/video-filters-react-native/ios/VideoFiltersReactNative.swift b/packages/video-filters-react-native/ios/VideoFiltersReactNative.swift index dbc329411c..725fdd0e27 100644 --- a/packages/video-filters-react-native/ios/VideoFiltersReactNative.swift +++ b/packages/video-filters-react-native/ios/VideoFiltersReactNative.swift @@ -1,19 +1,26 @@ @objc(VideoFiltersReactNative) class VideoFiltersReactNative: NSObject { - + + // Names we add to the global ProcessorProvider, so unregisterAllFilters can + // release them. 
Otherwise the processors live for the app's lifetime. + private static var registeredNames = Set() + @available(iOS 15.0, *) @objc(registerBackgroundBlurVideoFilters:withRejecter:) func registerBackgroundBlurVideoFilters(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { ProcessorProvider.addProcessor(BlurBackgroundVideoFrameProcessor(blurIntensity: BlurIntensity.light), forName: "BackgroundBlurLight") ProcessorProvider.addProcessor(BlurBackgroundVideoFrameProcessor(blurIntensity: BlurIntensity.medium), forName: "BackgroundBlurMedium") ProcessorProvider.addProcessor(BlurBackgroundVideoFrameProcessor(blurIntensity: BlurIntensity.heavy), forName: "BackgroundBlurHeavy") + Self.registeredNames.formUnion(["BackgroundBlurLight", "BackgroundBlurMedium", "BackgroundBlurHeavy"]) resolve(true) } - + @available(iOS 15.0, *) @objc(registerVirtualBackgroundFilter:withResolver:withRejecter:) func registerVirtualBackgroundFilter(backgroundImageUrlString: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { - ProcessorProvider.addProcessor(ImageBackgroundVideoFrameProcessor(backgroundImageUrlString), forName: "VirtualBackground-\(backgroundImageUrlString)") + let name = "VirtualBackground-\(backgroundImageUrlString)" + ProcessorProvider.addProcessor(ImageBackgroundVideoFrameProcessor(backgroundImageUrlString), forName: name) + Self.registeredNames.insert(name) resolve(true) } @@ -23,6 +30,16 @@ class VideoFiltersReactNative: NSObject { ProcessorProvider.addProcessor(BlurVideoFrameProcessor(blurIntensity: VideoBlurIntensity.light), forName: "BlurLight") ProcessorProvider.addProcessor(BlurVideoFrameProcessor(blurIntensity: VideoBlurIntensity.medium), forName: "BlurMedium") ProcessorProvider.addProcessor(BlurVideoFrameProcessor(blurIntensity: VideoBlurIntensity.heavy), forName: "BlurHeavy") + Self.registeredNames.formUnion(["BlurLight", "BlurMedium", "BlurHeavy"]) resolve(true) - } + } + + @objc(unregisterAllFilters:withRejecter:) + func 
unregisterAllFilters(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { + for name in Self.registeredNames { + ProcessorProvider.removeProcessor(name) + } + Self.registeredNames.removeAll() + resolve(true) + } } diff --git a/packages/video-filters-react-native/ios/VideoFrameProcessors/BlurBackgroundVideoFrameProcessor.swift b/packages/video-filters-react-native/ios/VideoFrameProcessors/BlurBackgroundVideoFrameProcessor.swift index f838de6da5..48ecaae463 100644 --- a/packages/video-filters-react-native/ios/VideoFrameProcessors/BlurBackgroundVideoFrameProcessor.swift +++ b/packages/video-filters-react-native/ios/VideoFrameProcessors/BlurBackgroundVideoFrameProcessor.swift @@ -19,10 +19,21 @@ final class BlurBackgroundVideoFrameProcessor: VideoFilter { filter: { input in input.originalImage } ) - self.filter = { input in - // https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html#//apple_ref/doc/filter/ci/CIGaussianBlur - let backgroundImage = input.originalImage.applyingFilter("CIGaussianBlur", parameters: self.blurParameters) - + self.filter = { [weak self] input in + // `[weak self]`: the closure is stored on `self` — a strong capture would leak the processor. + guard let self = self else { return input.originalImage } + // Blur at half resolution for speed. clampedToExtent + cropped(to:) + // keep the result at the original extent; without them the background + // drifts relative to the foreground. + let originalExtent = input.originalImage.extent + let halfSize = CGSize(width: originalExtent.width / 2, height: originalExtent.height / 2) + let downscaled = input.originalImage.resize(halfSize) ?? input.originalImage + let blurred = downscaled + .clampedToExtent() + .applyingFilter("CIGaussianBlur", parameters: self.blurParameters) + .cropped(to: downscaled.extent) + let backgroundImage = blurred.resize(originalExtent.size) ?? 
blurred + return self.backgroundImageFilterProcessor .applyFilter( input.originalPixelBuffer, diff --git a/packages/video-filters-react-native/ios/VideoFrameProcessors/ImageBackgroundVideoFrameProcessor.swift b/packages/video-filters-react-native/ios/VideoFrameProcessors/ImageBackgroundVideoFrameProcessor.swift index eafc5a5057..cd22bffbdf 100644 --- a/packages/video-filters-react-native/ios/VideoFrameProcessors/ImageBackgroundVideoFrameProcessor.swift +++ b/packages/video-filters-react-native/ios/VideoFrameProcessors/ImageBackgroundVideoFrameProcessor.swift @@ -27,53 +27,86 @@ final class ImageBackgroundVideoFrameProcessor: VideoFilter { private var cachedValue: CacheValue? private var backgroundImageUrl: String - + private lazy var backgroundImageFilterProcessor = { return BackgroundImageFilterProcessor() }() - - private lazy var backgroundCIImage: CIImage? = { - var bgUIImage: UIImage? - if let url = URL(string: backgroundImageUrl) { - // check if its a local asset - bgUIImage = RCTImageFromLocalAssetURL(url) - if (bgUIImage == nil) { - // if its not a local asset, then try to get it as a remote asset - if let data = try? Data(contentsOf: url) { - bgUIImage = UIImage(data: data) - } else { - NSLog("Failed to convert uri to image: -\(backgroundImageUrl)") - } - } - } - if (bgUIImage != nil) { - return CIImage.init(image: bgUIImage!) - } - return nil - }() - + + // Loaded on a background queue so a slow URL doesn't block the capture thread. + // NSLock because the load thread writes it and the capture thread reads it. + private let backgroundImageLock = NSLock() + private var _backgroundCIImage: CIImage? + private var backgroundImageTask: URLSessionDataTask? + + private var backgroundCIImage: CIImage? 
{ + backgroundImageLock.lock() + defer { backgroundImageLock.unlock() } + return _backgroundCIImage + } + @available(*, unavailable) override public init( filter: @escaping (Input) -> CIImage ) { fatalError() } - + init(_ backgroundImageUrl: String) { self.backgroundImageUrl = backgroundImageUrl super.init( filter: { input in input.originalImage } ) - - self.filter = { input in - guard let bgImage = self.backgroundCIImage else { return input.originalImage } + + DispatchQueue.global(qos: .userInitiated).async { [weak self] in + self?.loadBackgroundImage() + } + + self.filter = { [weak self] input in + // `[weak self]`: the closure is stored on `self` — a strong capture would leak the processor. + guard let self = self, let bgImage = self.backgroundCIImage else { return input.originalImage } let cachedBackgroundImage = self.backgroundImage(image: bgImage, originalImage: input.originalImage, originalImageOrientation: input.originalImageOrientation) - + let outputImage: CIImage = self.backgroundImageFilterProcessor .applyFilter( input.originalPixelBuffer, backgroundImage: cachedBackgroundImage ) ?? input.originalImage - + return outputImage } } + + private func loadBackgroundImage() { + guard let url = URL(string: backgroundImageUrl) else { return } + if let bgUIImage = RCTImageFromLocalAssetURL(url) { + setBackgroundImage(bgUIImage) + return + } + // Bounded timeout (matches Android's 10s) so a hanging remote URL + // doesn't keep this processor alive for the OS-default ~75s. + var request = URLRequest(url: url) + request.timeoutInterval = 10 + let task = URLSession.shared.dataTask(with: request) { [weak self] data, _, _ in + // `[weak self]`: if the processor is released while the task is in flight + // (deinit calls cancel()), the closure no-ops and the task is freed. + guard let self = self else { return } + guard let data = data, let bgUIImage = UIImage(data: data) else { + // URLs may carry signed-access query tokens; log only the host. + let host = url.host ?? 
"local" + NSLog("Failed to load virtual-background image (host=\(host))") + return + } + self.setBackgroundImage(bgUIImage) + } + backgroundImageTask = task + task.resume() + } + + private func setBackgroundImage(_ image: UIImage) { + backgroundImageLock.lock() + _backgroundCIImage = CIImage(image: image) + backgroundImageLock.unlock() + } + + deinit { + backgroundImageTask?.cancel() + } /// Returns the cached or processed background image for a given original image (frame image). private func backgroundImage(image: CIImage, originalImage: CIImage, originalImageOrientation: CGImagePropertyOrientation) -> CIImage { diff --git a/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/BackgroundImageFilterProcessor.swift b/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/BackgroundImageFilterProcessor.swift index d74c88518f..ce47c91696 100644 --- a/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/BackgroundImageFilterProcessor.swift +++ b/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/BackgroundImageFilterProcessor.swift @@ -7,17 +7,30 @@ import CoreImage.CIFilterBuiltins import Foundation import Vision -/// Processes a video frame to create a new image with a custom background. +/// Blends a video frame with a custom background using a Vision-generated mask. /// -/// This class generates a person segmentation mask using Vision, scales the mask -/// to match the video frame size, and blends the original image with a provided -/// background image using the mask. This allows for effects like background -/// replacement or blurring. +/// Segmentation runs asynchronously: each `applyFilter` call composites with the last +/// completed mask and only kicks a new Vision request if one isn't already in flight. +/// This keeps the capture thread unblocked at the cost of ≤1–2 frames of mask staleness, +/// which is imperceptible in practice (Android uses the same pattern with ML Kit). 
@available(iOS 15.0, *) final class BackgroundImageFilterProcessor { + private static let segmentationTargetHeight: CGFloat = 540 + private let requestHandler = VNSequenceRequestHandler() private let request: VNGeneratePersonSegmentationRequest + // Async segmentation pipeline. `ciContext` snapshots `CIImage`s to `CGImage`s so + // Vision doesn't share storage with the camera buffer pool or its own pooled result + // buffers. `segQueue` serialises Vision calls — `VNSequenceRequestHandler` isn't + // thread-safe under concurrent use. `segLock` guards `lastMask` and `inFlight`, + // both shared between the capture thread and `segQueue`. + private let ciContext = CIContext(options: [.useSoftwareRenderer: false]) + private let segQueue = DispatchQueue(label: "io.getstream.video.segmentation", qos: .userInitiated) + private let segLock = NSLock() + private var lastMask: CIImage? + private var inFlight = false + /// Initializes a new `BackgroundImageFilterProcessor` instance. /// @@ -39,36 +52,94 @@ final class BackgroundImageFilterProcessor { /// - Parameters: /// - buffer: The video frame to process as a `CVPixelBuffer`. /// - backgroundImage: The background image to blend with the foreground. - /// - Returns: A new `CIImage` with the processed frame, or `nil` if an error occurs. + /// - Returns: The blended `CIImage`. If no mask is ready yet (typical for the first + /// 1–2 frames of a session), returns `originalImage` as a pass-through. Returns + /// `nil` if the blend filter itself fails. func applyFilter( _ buffer: CVPixelBuffer, backgroundImage: CIImage ) -> CIImage? 
{ - do { - try requestHandler.perform([request], on: buffer) + let originalImage = CIImage(cvPixelBuffer: buffer) + + segLock.lock() + let mask = lastMask + let shouldDispatch = !inFlight + if shouldDispatch { + inFlight = true + } + segLock.unlock() - if let maskPixelBuffer = request.results?.first?.pixelBuffer { - let originalImage = CIImage(cvPixelBuffer: buffer) - var maskImage = CIImage(cvPixelBuffer: maskPixelBuffer) + if shouldDispatch { + let segInput = downscaleForSeg(originalImage) + // detach from the camera pool — `VideoFilter` will write the composite back into `buffer` + if let segCG = ciContext.createCGImage(segInput, from: segInput.extent) { + segQueue.async { [weak self] in + self?.runSegmentation(on: segCG) + } + } else { + segLock.lock() + inFlight = false + segLock.unlock() + } + } - // Scale the mask image to fit the bounds of the video frame. - let scaleX = originalImage.extent.width / maskImage.extent.width - let scaleY = originalImage.extent.height / maskImage.extent.height - maskImage = maskImage.transformed(by: .init(scaleX: scaleX, y: scaleY)) + guard var maskImage = mask else { + return originalImage + } - // Blend the original, background, and mask images. - let blendFilter = CIFilter.blendWithMask() - blendFilter.inputImage = originalImage - blendFilter.backgroundImage = backgroundImage - blendFilter.maskImage = maskImage + // Scale the mask image to fit the bounds of the video frame. + let scaleX = originalImage.extent.width / maskImage.extent.width + let scaleY = originalImage.extent.height / maskImage.extent.height + maskImage = maskImage.transformed(by: .init(scaleX: scaleX, y: scaleY)) - let result = blendFilter.outputImage - return result - } else { - return nil + // Blend the original, background, and mask images. 
+ let blendFilter = CIFilter.blendWithMask() + blendFilter.inputImage = originalImage + blendFilter.backgroundImage = backgroundImage + blendFilter.maskImage = maskImage + + return blendFilter.outputImage + } + + private func downscaleForSeg(_ image: CIImage) -> CIImage { + // Run segmentation at ~540p — Vision's cost scales with input size. + // The mask-upscale step below already handles whatever size Vision returns. + if image.extent.height > Self.segmentationTargetHeight { + let scale = Self.segmentationTargetHeight / image.extent.height + let targetSize = CGSize( + width: image.extent.width * scale, + height: Self.segmentationTargetHeight + ) + return image.resize(targetSize) ?? image + } + return image + } + + /// Runs on `segQueue`. Performs Vision on the snapshotted `CGImage` and stores the + /// result mask under `segLock`. `inFlight` is always cleared via `defer`, so a thrown + /// `perform`, missing results, or failed snapshot won't deadlock future frames. + private func runSegmentation(on cgImage: CGImage) { + defer { + segLock.lock() + inFlight = false + segLock.unlock() + } + do { + try requestHandler.perform([request], on: cgImage, orientation: .up) + guard let maskPixelBuffer = request.results?.first?.pixelBuffer else { + return + } + let rawMask = CIImage(cvPixelBuffer: maskPixelBuffer) + // Snapshot to a CGImage so `lastMask` survives Vision's potential buffer reuse. 
+ guard let maskCG = ciContext.createCGImage(rawMask, from: rawMask.extent) else { + return } + let snapshot = CIImage(cgImage: maskCG) + segLock.lock() + lastMask = snapshot + segLock.unlock() } catch { - return nil + return } } } diff --git a/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/VideoFilters.swift b/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/VideoFilters.swift index c478ae46aa..b456c6b0a2 100644 --- a/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/VideoFilters.swift +++ b/packages/video-filters-react-native/ios/VideoFrameProcessors/Utils/VideoFilters.swift @@ -79,7 +79,11 @@ open class VideoFilter: NSObject, VideoFrameProcessorDelegate { ) updateRotation() } - + + deinit { + NotificationCenter.default.removeObserver(self) + } + @objc private func updateRotation() { DispatchQueue.main.async { self.sceneOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .unknown diff --git a/packages/video-filters-react-native/package.json b/packages/video-filters-react-native/package.json index 94d1952914..700283f1f3 100644 --- a/packages/video-filters-react-native/package.json +++ b/packages/video-filters-react-native/package.json @@ -56,7 +56,7 @@ "typescript": "^5.9.3" }, "peerDependencies": { - "@stream-io/react-native-webrtc": ">=137.1.2", + "@stream-io/react-native-webrtc": ">=137.2.0", "react-native": "*" }, "react-native-builder-bob": { diff --git a/packages/video-filters-react-native/src/index.ts b/packages/video-filters-react-native/src/index.ts index daa5d33079..dfb9a1d3d4 100644 --- a/packages/video-filters-react-native/src/index.ts +++ b/packages/video-filters-react-native/src/index.ts @@ -56,3 +56,12 @@ export async function registerVirtualBackgroundFilter( export async function registerBlurVideoFilters(): Promise { return await VideoFiltersReactNative.registerBlurVideoFilters(); } + +/** + * Unregisters all filters that were previously registered via this 
module, + * allowing the native processor instances to be released. Safe to call even + * if no filters were registered. + */ +export async function unregisterAllFilters(): Promise<void> { + return await VideoFiltersReactNative.unregisterAllFilters(); +} diff --git a/sample-apps/react-native/dogfood/ios/Podfile.lock b/sample-apps/react-native/dogfood/ios/Podfile.lock index 054234cfea..b69565a93c 100644 --- a/sample-apps/react-native/dogfood/ios/Podfile.lock +++ b/sample-apps/react-native/dogfood/ios/Podfile.lock @@ -3270,7 +3270,7 @@ PODS: - SocketRocket - stream-react-native-webrtc - Yoga - - stream-react-native-webrtc (137.1.3): + - stream-react-native-webrtc (137.2.0): - React-Core - StreamWebRTC (~> 137.0.54) - stream-video-react-native (1.32.3): @@ -3696,7 +3696,7 @@ SPEC CHECKSUMS: FBLazyVector: 82d1d7996af4c5850242966eb81e73f9a6dfab1e fmt: a40bb5bd0294ea969aaaba240a927bd33d878cdd glog: 5683914934d5b6e4240e497e0f4a3b42d1854183 - hermes-engine: 0e3a9e48a838b913a3f5cadce1be93c489cfbb05 + hermes-engine: 79258df51fb2de8c52574d7678c0aeb338e65c3b RCT-Folly: 846fda9475e61ec7bcbf8a3fe81edfcaeb090669 RCTDeprecation: 9da1d0cf93db23ca8b41e8efe9ae558fd9c0077f RCTRequired: 92a63c7041031a131fa5206eb082d53f95729b79 @@ -3787,7 +3787,7 @@ SPEC CHECKSUMS: stream-chat-react-native: 892a55dc716349e9c953286116a51ff410e9f1a3 stream-io-noise-cancellation-react-native: ea8ca1d50e305f2a0ffa027ff36c345aa5278237 stream-io-video-filters-react-native: 43d4e9901cf478a1340a599242226d024c2eb1a5 - stream-react-native-webrtc: 98f68f17acc6bd95b5cc417dfdc0953e0120e696 + stream-react-native-webrtc: d5e9e2bfdff70415d153b9ad8e6f1cb7aa3bbb0e stream-video-react-native: 177794d3bf97980312b57959dea422b80e6576e5 StreamVideoNoiseCancellation: 41f5a712aba288f9636b64b17ebfbdff52c61490 StreamWebRTC: 57bd35729bcc46b008de4e741a5b23ac28b8854d @@ -3797,4 +3797,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: 0a22ee65b5bc47bc9d8a62deb3ee46f06752313f -COCOAPODS: 1.16.2 +COCOAPODS: 1.16.2 diff --git 
a/sample-apps/react-native/dogfood/package.json b/sample-apps/react-native/dogfood/package.json index ee74c6226d..fcaa464f17 100644 --- a/sample-apps/react-native/dogfood/package.json +++ b/sample-apps/react-native/dogfood/package.json @@ -22,7 +22,7 @@ "@react-navigation/native-stack": "^7.3.27", "@stream-io/noise-cancellation-react-native": "workspace:^", "@stream-io/react-native-callingx": "workspace:^", - "@stream-io/react-native-webrtc": "137.1.3", + "@stream-io/react-native-webrtc": "137.2.0", "@stream-io/video-filters-react-native": "workspace:^", "@stream-io/video-react-native-sdk": "workspace:^", "axios": "^1.12.2", diff --git a/sample-apps/react-native/expo-video-sample/package.json b/sample-apps/react-native/expo-video-sample/package.json index b1ec62af65..329fa811df 100644 --- a/sample-apps/react-native/expo-video-sample/package.json +++ b/sample-apps/react-native/expo-video-sample/package.json @@ -20,7 +20,7 @@ "@react-native-firebase/messaging": "~23.7.0", "@stream-io/noise-cancellation-react-native": "workspace:^", "@stream-io/react-native-callingx": "workspace:^", - "@stream-io/react-native-webrtc": "137.1.3", + "@stream-io/react-native-webrtc": "137.2.0", "@stream-io/video-filters-react-native": "workspace:^", "@stream-io/video-react-native-sdk": "workspace:^", "expo": "^54.0.12", diff --git a/sample-apps/react-native/ringing-tutorial/package.json b/sample-apps/react-native/ringing-tutorial/package.json index 752934dfb9..c40e0980f2 100644 --- a/sample-apps/react-native/ringing-tutorial/package.json +++ b/sample-apps/react-native/ringing-tutorial/package.json @@ -22,7 +22,7 @@ "@react-navigation/bottom-tabs": "^7.4.8", "@react-navigation/native": "^7.1.18", "@stream-io/react-native-callingx": "workspace:^", - "@stream-io/react-native-webrtc": "137.1.3", + "@stream-io/react-native-webrtc": "137.2.0", "@stream-io/video-react-native-sdk": "workspace:^", "expo": "^55.0.0", "expo-blur": "~55.0.10", diff --git a/yarn.lock b/yarn.lock index 
071756f631..63c09f2a50 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8364,7 +8364,7 @@ __metadata: "@rnx-kit/metro-resolver-symlinks": "npm:^0.2.6" "@stream-io/noise-cancellation-react-native": "workspace:^" "@stream-io/react-native-callingx": "workspace:^" - "@stream-io/react-native-webrtc": "npm:137.1.3" + "@stream-io/react-native-webrtc": "npm:137.2.0" "@stream-io/video-filters-react-native": "workspace:^" "@stream-io/video-react-native-sdk": "workspace:^" "@types/react": "npm:~19.1.17" @@ -8514,6 +8514,18 @@ __metadata: languageName: node linkType: hard +"@stream-io/react-native-webrtc@npm:137.2.0": + version: 137.2.0 + resolution: "@stream-io/react-native-webrtc@npm:137.2.0" + dependencies: + base64-js: "npm:1.5.1" + debug: "npm:4.3.4" + peerDependencies: + react-native: ">=0.73.0" + checksum: 10/d9aa189f0602ee4a6d503d735345af846647f0265df8a5ed5f54bc1f034536d294ed0ab8ec2af30bacc73c1b7526d5fca6da539648b938ce33ea97d058a279be + languageName: node + linkType: hard + "@stream-io/stream-video-react-tutorial@workspace:sample-apps/react/stream-video-react-tutorial": version: 0.0.0-use.local resolution: "@stream-io/stream-video-react-tutorial@workspace:sample-apps/react/stream-video-react-tutorial" @@ -8607,7 +8619,7 @@ __metadata: rimraf: "npm:^6.0.1" typescript: "npm:^5.9.3" peerDependencies: - "@stream-io/react-native-webrtc": ">=137.1.2" + "@stream-io/react-native-webrtc": ">=137.2.0" react-native: "*" languageName: unknown linkType: soft @@ -8742,7 +8754,7 @@ __metadata: "@rnx-kit/metro-resolver-symlinks": "npm:^0.2.6" "@stream-io/noise-cancellation-react-native": "workspace:^" "@stream-io/react-native-callingx": "workspace:^" - "@stream-io/react-native-webrtc": "npm:137.1.3" + "@stream-io/react-native-webrtc": "npm:137.2.0" "@stream-io/video-filters-react-native": "workspace:^" "@stream-io/video-react-native-sdk": "workspace:^" "@types/react": "npm:^19.2.0" @@ -8792,7 +8804,7 @@ __metadata: "@rnx-kit/metro-config": "npm:^2.1.2" "@rnx-kit/metro-resolver-symlinks": 
"npm:^0.2.6" "@stream-io/react-native-callingx": "workspace:^" - "@stream-io/react-native-webrtc": "npm:137.1.3" + "@stream-io/react-native-webrtc": "npm:137.2.0" "@stream-io/video-react-native-sdk": "workspace:^" "@types/react": "npm:~19.2.10" expo: "npm:^55.0.0" @@ -8840,7 +8852,7 @@ __metadata: "@react-native/babel-preset": "npm:^0.81.5" "@stream-io/noise-cancellation-react-native": "workspace:^" "@stream-io/react-native-callingx": "workspace:^" - "@stream-io/react-native-webrtc": "npm:137.1.3" + "@stream-io/react-native-webrtc": "npm:137.2.0" "@stream-io/video-client": "workspace:*" "@stream-io/video-filters-react-native": "workspace:^" "@stream-io/video-react-bindings": "workspace:*" @@ -8877,7 +8889,7 @@ __metadata: "@react-native-firebase/messaging": ">=17.5.0" "@stream-io/noise-cancellation-react-native": ">=0.1.0" "@stream-io/react-native-callingx": ">=0.1.0" - "@stream-io/react-native-webrtc": ">=137.1.3" + "@stream-io/react-native-webrtc": ">=137.2.0" "@stream-io/video-filters-react-native": ">=0.1.0" expo: ">=47.0.0" expo-build-properties: "*"