Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions packages/react-native-sdk/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
"@react-native-firebase/messaging": ">=17.5.0",
"@stream-io/noise-cancellation-react-native": ">=0.1.0",
"@stream-io/react-native-callingx": ">=0.1.0",
"@stream-io/react-native-webrtc": ">=137.1.3",
"@stream-io/react-native-webrtc": ">=137.2.0",
"@stream-io/video-filters-react-native": ">=0.1.0",
"expo": ">=47.0.0",
"expo-build-properties": "*",
Expand Down Expand Up @@ -127,7 +127,7 @@
"@react-native/babel-preset": "^0.81.5",
"@stream-io/noise-cancellation-react-native": "workspace:^",
"@stream-io/react-native-callingx": "workspace:^",
"@stream-io/react-native-webrtc": "137.1.3",
"@stream-io/react-native-webrtc": "137.2.0",
"@stream-io/video-filters-react-native": "workspace:^",
"@testing-library/jest-native": "^5.4.3",
"@testing-library/react-native": "13.3.3",
Expand Down
64 changes: 55 additions & 9 deletions packages/react-native-sdk/src/contexts/BackgroundFilters.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import React, {
type PropsWithChildren,
useCallback,
useContext,
useEffect,
useMemo,
useRef,
useState,
Expand Down Expand Up @@ -76,6 +77,10 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
const isBackgroundBlurRegisteredRef = useRef(false);
const isVideoBlurRegisteredRef = useRef(false);
const registeredImageFiltersSetRef = useRef(new Set<string>());
// The currently applied native filter name. Used to reapply on track
// replacement, and as a staleness signal so a later apply/disable can
// invalidate an in-flight apply() call.
const lastAppliedFilterNameRef = useRef<string | null>(null);
Comment thread
coderabbitai[bot] marked this conversation as resolved.

const [currentBackgroundFilter, setCurrentBackgroundFilter] =
useState<CurrentBackgroundFilter>();
Expand All @@ -85,16 +90,19 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
if (!isSupported) {
return;
}
if (!isBackgroundBlurRegisteredRef.current) {
await videoFiltersModule?.registerBackgroundBlurVideoFilters();
isBackgroundBlurRegisteredRef.current = true;
}
let filterName = 'BackgroundBlurMedium';
if (blurIntensity === 'heavy') {
filterName = 'BackgroundBlurHeavy';
} else if (blurIntensity === 'light') {
filterName = 'BackgroundBlurLight';
}
// Set before awaiting so a later apply/disable can mark this call stale.
lastAppliedFilterNameRef.current = filterName;
if (!isBackgroundBlurRegisteredRef.current) {
await videoFiltersModule?.registerBackgroundBlurVideoFilters();
if (lastAppliedFilterNameRef.current !== filterName) return;
isBackgroundBlurRegisteredRef.current = true;
}
call?.tracer.trace('backgroundFilters.apply', filterName);
(call?.camera.state.mediaStream as MediaStream | undefined)
?.getVideoTracks()
Expand All @@ -111,16 +119,18 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
if (!isSupported) {
return;
}
if (!isVideoBlurRegisteredRef.current) {
await videoFiltersModule?.registerBlurVideoFilters();
isVideoBlurRegisteredRef.current = true;
}
let filterName = 'BlurMedium';
if (blurIntensity === 'heavy') {
filterName = 'BlurHeavy';
} else if (blurIntensity === 'light') {
filterName = 'BlurLight';
}
lastAppliedFilterNameRef.current = filterName;
if (!isVideoBlurRegisteredRef.current) {
await videoFiltersModule?.registerBlurVideoFilters();
if (lastAppliedFilterNameRef.current !== filterName) return;
isVideoBlurRegisteredRef.current = true;
}
call?.tracer.trace('videoFilters.apply', filterName);
(call?.camera.state.mediaStream as MediaStream | undefined)
?.getVideoTracks()
Expand All @@ -139,12 +149,14 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
}
const source = Image.resolveAssetSource(imageSource);
const imageUri = source.uri;
const filterName = `VirtualBackground-${imageUri}`;
lastAppliedFilterNameRef.current = filterName;
const registeredImageFiltersSet = registeredImageFiltersSetRef.current;
if (!registeredImageFiltersSet.has(imageUri)) {
await videoFiltersModule?.registerVirtualBackgroundFilter(imageSource);
if (lastAppliedFilterNameRef.current !== filterName) return;
registeredImageFiltersSetRef.current.add(imageUri);
}
const filterName = `VirtualBackground-${imageUri}`;
call?.tracer.trace('backgroundFilters.apply', filterName);
(call?.camera.state.mediaStream as MediaStream | undefined)
?.getVideoTracks()
Expand All @@ -161,6 +173,8 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
return;
}
call?.tracer.trace('backgroundFilters.disableAll', null);
// Clearing the ref invalidates any in-flight apply β€” its stale check will bail.
lastAppliedFilterNameRef.current = null;
(call?.camera.state.mediaStream as MediaStream | undefined)
?.getVideoTracks()
.forEach((track) => {
Expand All @@ -169,6 +183,38 @@ export const BackgroundFiltersProvider = ({ children }: PropsWithChildren) => {
setCurrentBackgroundFilter(undefined);
}, [call]);

// Reapplies the filter on track replacement (flip, enable-after-disable).
// Releases native filter state on unmount / call change.
useEffect(() => {
  if (!call || !isSupported) return;
  // Capture the set instance now so the cleanup clears the same object the
  // effect observed, even if the ref is swapped later.
  const registeredImageFiltersSet = registeredImageFiltersSetRef.current;
  const subscription = call.camera.state.mediaStream$.subscribe(() => {
    const name = lastAppliedFilterNameRef.current;
    // No filter currently applied — nothing to restore onto the new track.
    if (!name) return;
    (call.camera.state.mediaStream as MediaStream | undefined)
      ?.getVideoTracks()
      .forEach((track) => {
        track._setVideoEffect(name);
      });
  });
  return () => {
    subscription.unsubscribe();
    // Detach the effect from any live video track before releasing native state.
    (call.camera.state.mediaStream as MediaStream | undefined)
      ?.getVideoTracks()
      .forEach((track) => {
        track._setVideoEffect(null);
      });
    // Drop native processor refs so they can be deallocated. Otherwise the
    // ProcessorProvider registry holds them for the app's lifetime.
    // Best-effort: failures are ignored (module may be gone during teardown).
    videoFiltersModule?.unregisterAllFilters?.().catch(() => {});
    // Reset all registration bookkeeping so a later call re-registers filters.
    lastAppliedFilterNameRef.current = null;
    isBackgroundBlurRegisteredRef.current = false;
    isVideoBlurRegisteredRef.current = false;
    registeredImageFiltersSet.clear();
    setCurrentBackgroundFilter(undefined);
  };
}, [call]);

const value = useMemo(
() => ({
currentBackgroundFilter,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ import com.streamio.videofiltersreactnative.factories.*
class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) :
ReactContextBaseJavaModule(reactContext) {

// Names we add to the global ProcessorProvider, so unregisterAllFilters can
// release them. Otherwise factories accumulate for the app's lifetime.
private val registeredNames = mutableSetOf<String>()
Comment thread
santhoshvai marked this conversation as resolved.

/** Name this native module is registered under on the JS side. */
override fun getName(): String = NAME
Expand All @@ -36,15 +40,18 @@ class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) :
"BackgroundBlurHeavy",
BackgroundBlurFactory(BlurIntensity.HEAVY)
)
registeredNames.addAll(listOf("BackgroundBlurLight", "BackgroundBlurMedium", "BackgroundBlurHeavy"))
promise.resolve(true)
}

/**
 * Registers a virtual-background processor for the given image URL and records
 * its name so [unregisterAllFilters] can release it later. Always resolves true.
 */
@ReactMethod
fun registerVirtualBackgroundFilter(backgroundImageUrlString: String, promise: Promise) {
    // Processor name must mirror the `VirtualBackground-<uri>` convention the
    // JS side uses when applying the effect to a track.
    val processorName = "VirtualBackground-$backgroundImageUrlString"
    val factory = VirtualBackgroundFactory(reactApplicationContext, backgroundImageUrlString)
    ProcessorProvider.addProcessor(processorName, factory)
    registeredNames.add(processorName)
    promise.resolve(true)
}

Expand All @@ -53,9 +60,19 @@ class VideoFiltersReactNativeModule(reactContext: ReactApplicationContext) :
ProcessorProvider.addProcessor("BlurLight", VideoBlurFactory(VideoBlurIntensity.LIGHT))
ProcessorProvider.addProcessor("BlurMedium", VideoBlurFactory(VideoBlurIntensity.MEDIUM))
ProcessorProvider.addProcessor("BlurHeavy", VideoBlurFactory(VideoBlurIntensity.HEAVY))
registeredNames.addAll(listOf("BlurLight", "BlurMedium", "BlurHeavy"))
promise.resolve(true)
}

/**
 * Removes every processor this module registered and clears the bookkeeping
 * set, allowing the native factories to be released. Always resolves true.
 */
@ReactMethod
fun unregisterAllFilters(promise: Promise) {
    registeredNames.forEach { ProcessorProvider.removeProcessor(it) }
    registeredNames.clear()
    promise.resolve(true)
}

companion object {
    // Module name returned by getName(); the identifier JS uses to look up
    // this native module.
    const val NAME = "VideoFiltersReactNative"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,6 @@ import android.graphics.Bitmap
*/
abstract class BitmapVideoFilter {
    // Applies the filter's effect to the given frame bitmap (in place).
    abstract fun applyFilter(videoFrameBitmap: Bitmap)

    // Optional hook to release resources held by the filter when it is
    // discarded (called from VideoFrameProcessorWithBitmapFilter.dispose).
    // No-op by default so existing subclasses are unaffected.
    open fun close() {}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,24 +18,24 @@ import org.webrtc.YuvConverter
class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVideoFilter) :
VideoFrameProcessor {
private val yuvConverter = YuvConverter()
private val yuvFrame = YuvFrame()
private var inputWidth = 0
private var inputHeight = 0
private var inputBuffer: VideoFrame.TextureBuffer? = null
private var yuvBuffer: VideoFrame.I420Buffer? = null
private val textures = IntArray(1)
private var inputFrameBitmap: Bitmap? = null

private val bitmapVideoFilter by lazy {
bitmapVideoFilterFunc.invoke()
}
private val bitmapVideoFilterLazy = lazy { bitmapVideoFilterFunc.invoke() }
private val bitmapVideoFilter: BitmapVideoFilter by bitmapVideoFilterLazy

init {
GLES20.glGenTextures(1, textures, 0)
}

override fun process(frame: VideoFrame, surfaceTextureHelper: SurfaceTextureHelper): VideoFrame {
// Step 1: Video Frame to Bitmap
val inputFrameBitmap = YuvFrame.bitmapFromVideoFrame(frame) ?: return frame
val inputFrameBitmap = yuvFrame.bitmapFromVideoFrame(frame) ?: return frame

// Prepare helpers (runs only once or if the dimensions change)
initialize(
Expand Down Expand Up @@ -66,9 +66,6 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid
}

private fun initialize(width: Int, height: Int, textureHelper: SurfaceTextureHelper) {
// TODO: temporarily disabled due to crash: java.lang.IllegalStateException: release() called on an object with refcount < 1
// yuvBuffer?.release()

if (this.inputWidth != width || this.inputHeight != height) {
Log.d(TAG, "initialize - width: $width height: $height")
this.inputWidth = width
Expand All @@ -92,6 +89,25 @@ class VideoFrameProcessorWithBitmapFilter(bitmapVideoFilterFunc: () -> BitmapVid
}
}

// Runs on the GL thread serialized with process() — inline cleanup is safe.
// Always delete the texture: glGenTextures in init can return a valid id even
// before initialize() runs, so enable-then-disable would leak it otherwise.
override fun dispose() {
    // Only close the filter if the lazy initializer ever ran; reading
    // `bitmapVideoFilter` here would otherwise *create* the filter just to close it.
    if (bitmapVideoFilterLazy.isInitialized()) {
        bitmapVideoFilter.close()
    }
    // Frees the libyuv off-heap buffers (see YuvFrame.close()).
    yuvFrame.close()
    inputFrameBitmap?.recycle()
    inputFrameBitmap = null
    yuvConverter.release()
    inputBuffer?.release()
    inputBuffer = null
    // 0 is never a valid GL texture id, so it doubles as the "already deleted" marker.
    if (textures[0] != 0) {
        GLES20.glDeleteTextures(1, intArrayOf(textures[0]), 0)
        textures[0] = 0
    }
}

companion object {
    // Log tag used by this processor (see initialize()'s Log.d call).
    private const val TAG = "VideoFrameProcessorWithBitmapFilter"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,7 @@ import io.github.crow_misia.libyuv.RotateMode
import io.github.crow_misia.libyuv.RowStride
import org.webrtc.VideoFrame

object YuvFrame {
private const val TAG = "YuvFrame"

class YuvFrame {
private lateinit var webRtcI420Buffer: VideoFrame.I420Buffer
private lateinit var libYuvI420Buffer: I420Buffer
private var libYuvRotatedI420Buffer: I420Buffer? = null
Expand Down Expand Up @@ -94,4 +92,16 @@ object YuvFrame {
webRtcI420Buffer.release()
// Rest of buffers are closed in the methods above
}

/**
 * Releases the libyuv buffers this helper caches between frames. They wrap
 * off-heap allocations, so GC alone will not reclaim the memory. Safe to call
 * more than once: each buffer reference is nulled after closing.
 */
fun close() {
    libYuvRotatedI420Buffer?.close()
    libYuvAbgrBuffer?.close()
    libYuvRotatedI420Buffer = null
    libYuvAbgrBuffer = null
}

companion object {
    // Log tag for this class.
    private const val TAG = "YuvFrame"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,15 @@ private class BlurredBackgroundVideoFilter(
blurredBackgroundBitmap.recycle()
}

/**
 * Releases everything this filter holds onto: the ML Kit segmenter and the
 * bitmaps cached between frames. References are nulled so a stray later call
 * cannot touch recycled bitmaps.
 */
override fun close() {
    segmenter.close()
    backgroundMaskBitmap?.recycle()
    downScaledBackgroundBitmap?.recycle()
    backgroundMaskBitmap = null
    downScaledBackgroundBitmap = null
}

private fun maybeInit(videoFrameBitmap: Bitmap, mask: SegmentationMask) {
var createScale = false
if (currentFrameWidth != videoFrameBitmap.width || currentFrameHeight != videoFrameBitmap.height) {
Expand Down
Loading
Loading