Ezmary's picture
Update src/components/control-tray/ControlTray.tsx
7da7877 verified
raw
history blame
14.8 kB
/**
Copyright 2024 Google LLC
... (license text and remaining notices unchanged from the previous revision) ...
*/
import cn from "classnames";
import { memo, ReactNode, RefObject, useEffect, useRef, useState, useCallback } from "react"; // useCallback added
import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
// import { UseMediaStreamResult } from "../../hooks/use-media-stream-mux"; // Seem unused directly here
import { useScreenCapture } from "../../hooks/use-screen-capture";
import { useWebcam } from "../../hooks/use-webcam";
import { AudioRecorder } from "../../lib/audio-recorder";
import { isIOS } from "../../lib/platform";
import AudioPulse from "../audio-pulse/AudioPulse";
import "./control-tray.scss";
// Props accepted by the ControlTray component.
export type ControlTrayProps = {
// Ref to the <video> element used to preview the active media stream.
videoRef: RefObject<HTMLVideoElement>;
children?: ReactNode; // Might not be used if UI is hidden
// Whether the host supports video capture. NOTE(review): not read inside this
// component's visible body — confirm whether callers still rely on it.
supportsVideo: boolean;
// Invoked with the new stream when video starts, or null when it stops.
onVideoStreamChange?: (stream: MediaStream | null) => void;
isUiHidden?: boolean; // New prop to hide default UI
};
// MediaStreamButton might not be needed if App.tsx handles buttons
// const MediaStreamButton = memo(...);
/**
 * ControlTray coordinates media input for a live API session:
 *  - webcam / screen-capture video, including front <-> back camera switching,
 *  - microphone audio streamed to the client as 16 kHz PCM chunks,
 *  - periodic downscaled JPEG frames sampled from the active video stream.
 *
 * When `isUiHidden` is true only the hidden capture canvas (plus a hidden
 * AudioPulse) is rendered, leaving the visible controls to the parent
 * component (e.g. App.tsx).
 */
function ControlTray({
  videoRef,
  children,
  onVideoStreamChange = () => {},
  supportsVideo, // NOTE(review): not read in this body — kept for prop compatibility.
  isUiHidden = false, // Default to false
}: ControlTrayProps) {
  const webcam = useWebcam();
  const screenCapture = useScreenCapture();
  const [activeVideoStream, setActiveVideoStream] = useState<MediaStream | null>(null);
  // Which way the webcam currently faces; null while video is off or screen-sharing.
  const [currentFacingModeInternal, setCurrentFacingModeInternal] = useState<'user' | 'environment' | null>(null);
  const [isSwitchingCamera, setIsSwitchingCamera] = useState(false);
  const [isLikelyDesktop, setIsLikelyDesktop] = useState(false);
  const [inVolume, setInVolume] = useState(0);
  const [audioRecorder] = useState(() => new AudioRecorder());
  // const [muted, setMuted] = useState(false); // Muted state will be controlled by LiveAPIContext
  const renderCanvasRef = useRef<HTMLCanvasElement>(null);
  const connectButtonRef = useRef<HTMLButtonElement>(null); // May not be needed if App.tsx handles connect button
  const [simulatedVolume, setSimulatedVolume] = useState(0);
  const isIOSDevice = isIOS();
  const {
    client,
    connected,
    connect,
    disconnect,
    volume,
    setMuted: contextSetMuted,
    setSystemInstruction,
    updateLiveConfig,
    currentFacingMode,
    setCurrentFacingMode,
    rotateWebcam: contextRotateWebcam,
    changeStreams: contextChangeStreams,
  } = useLiveAPIContext();
  // const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);

  // Keep the shared context in sync with the locally tracked facing mode.
  useEffect(() => {
    if (setCurrentFacingMode) setCurrentFacingMode(currentFacingModeInternal);
  }, [currentFacingModeInternal, setCurrentFacingMode]);

  /**
   * Switch the active video source. Stops whatever is currently running,
   * then starts the requested source:
   *  - 'webcam': user-facing camera first, falling back to 'environment';
   *  - 'screen': screen capture (desktop only);
   *  - 'none':   leave video off.
   */
  const changeStreamsInternal = useCallback(async (streamType: 'webcam' | 'screen' | 'none') => {
    if (isSwitchingCamera) return;
    if (streamType === 'screen' && !isLikelyDesktop) {
      console.warn("Screen share requested on non-desktop device, ignoring.");
      return;
    }
    // Stop existing streams first
    if (activeVideoStream) {
      activeVideoStream.getTracks().forEach(track => track.stop());
    }
    webcam.stop();
    screenCapture.stop();
    setActiveVideoStream(null);
    onVideoStreamChange(null);
    setCurrentFacingModeInternal(null);
    if (streamType === 'webcam') {
      const initialFacingMode = 'user';
      console.log(`🚀 Starting webcam with initial facingMode: ${initialFacingMode}`);
      try {
        const mediaStream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: initialFacingMode }, audio: false });
        setActiveVideoStream(mediaStream);
        onVideoStreamChange(mediaStream);
        setCurrentFacingModeInternal(initialFacingMode);
      } catch (error) {
        console.error(`❌ Error starting webcam with ${initialFacingMode}:`, error);
        // Front camera failed — try the environment-facing camera instead.
        try {
          const fallbackStream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: 'environment' }, audio: false });
          setActiveVideoStream(fallbackStream);
          onVideoStreamChange(fallbackStream);
          setCurrentFacingModeInternal('environment');
        } catch (fallbackError) {
          console.error('❌ Error starting webcam fallback:', fallbackError);
          // No stream set
        }
      }
    } else if (streamType === 'screen' && isLikelyDesktop) {
      console.log('🚀 Starting screen capture');
      try {
        const mediaStream = await screenCapture.start();
        setActiveVideoStream(mediaStream);
        onVideoStreamChange(mediaStream);
        setCurrentFacingModeInternal(null); // Screen share doesn't have a facing mode
      } catch (error) {
        console.error('❌ Error starting screen capture:', error);
      }
    } else {
      console.log('ℹ️ Video stream turned off or invalid request.');
    }
  }, [isSwitchingCamera, isLikelyDesktop, webcam, screenCapture, onVideoStreamChange, activeVideoStream]);

  /**
   * Flip between the front ('user') and rear ('environment') camera.
   * On failure, retries without the `exact` constraint, then attempts to
   * restore the previous camera; as a last resort turns video off entirely.
   */
  const rotateWebcamInternal = useCallback(async () => {
    if (isSwitchingCamera || !activeVideoStream || currentFacingModeInternal === null) return;
    const targetFacingMode = currentFacingModeInternal === 'user' ? 'environment' : 'user';
    console.log(`🔄 Rotating webcam... Target: ${targetFacingMode}`);
    setIsSwitchingCamera(true);
    // Stop only the tracks of the active video stream
    activeVideoStream.getTracks().forEach(track => track.stop());
    try {
      const newStream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: { exact: targetFacingMode } }, audio: false });
      if (videoRef.current) {
        videoRef.current.srcObject = newStream;
        videoRef.current.play().catch(e => console.warn("Play fail switch:", e));
      }
      setActiveVideoStream(newStream);
      onVideoStreamChange(newStream);
      setCurrentFacingModeInternal(targetFacingMode);
    } catch (error: any) {
      console.error(`❌ Error switching camera:`, error.name);
      // Fallback logic...
      let recoveredStream: MediaStream | null = null;
      if (error.name === 'OverconstrainedError' || error.name === 'ConstraintNotSatisfiedError') {
        try {
          recoveredStream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: targetFacingMode }, audio: false }); // Try without exact
          setCurrentFacingModeInternal(targetFacingMode);
        } catch (retryError: any) { console.error(`Retry fail:`, retryError.name); }
      }
      if (!recoveredStream) { // Try to restore original
        try {
          recoveredStream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: { exact: currentFacingModeInternal } }, audio: false });
        } catch (restoreError) { console.error(`Restore fail:`, restoreError); }
      }
      if (recoveredStream) {
        if (videoRef.current) { videoRef.current.srcObject = recoveredStream; videoRef.current.play().catch(e => console.warn("Play fail recovery:", e)); }
        setActiveVideoStream(recoveredStream);
        onVideoStreamChange(recoveredStream);
      } else { // Total failure
        if (videoRef.current) videoRef.current.srcObject = null;
        setActiveVideoStream(null);
        onVideoStreamChange(null);
        setCurrentFacingModeInternal(null);
      }
    } finally {
      setIsSwitchingCamera(false);
    }
  }, [isSwitchingCamera, activeVideoStream, currentFacingModeInternal, videoRef, onVideoStreamChange]);

  // Provide these functions to the context if they don't exist or need overriding
  useEffect(() => {
    if (updateLiveConfig && !contextChangeStreams) {
      updateLiveConfig({ changeStreams: changeStreamsInternal });
    }
    if (updateLiveConfig && !contextRotateWebcam) {
      updateLiveConfig({ rotateWebcam: rotateWebcamInternal });
    }
  }, [updateLiveConfig, changeStreamsInternal, contextChangeStreams, rotateWebcamInternal, contextRotateWebcam]);

  // --- Effects ---
  // Heuristic: treat devices with no touch points as desktop (enables screen share).
  useEffect(() => {
    const desktopCheck = typeof navigator !== 'undefined' && navigator.maxTouchPoints <= 0;
    setIsLikelyDesktop(desktopCheck);
  }, []);

  // iOS volume simulation
  useEffect(() => {
    let interval: number | undefined;
    if (isIOSDevice && connected && !client?.isMuted) { // Assuming client.isMuted reflects actual mute state
      interval = window.setInterval(() => {
        // Gentle sine pulse between 0.02 and 0.05 to animate the mic indicator.
        const pulse = (Math.sin(Date.now() / 500) + 1) / 2;
        setSimulatedVolume(0.02 + pulse * 0.03);
      }, 50);
    }
    return () => {
      if (interval) clearInterval(interval);
    };
  }, [connected, client?.isMuted, isIOSDevice]);

  // CSS volume update for mic pulse
  useEffect(() => {
    document.documentElement.style.setProperty(
      "--volume",
      `${Math.max(5, Math.min((isIOSDevice ? simulatedVolume : inVolume) * 200, 12))}px`, // Increased max pulse for visibility
    );
  }, [inVolume, simulatedVolume, isIOSDevice]);

  // Audio recording: stream PCM chunks to the client while connected and unmuted.
  useEffect(() => {
    const onData = (base64: string) => {
      if (client && connected) {
        client.sendRealtimeInput([{ mimeType: "audio/pcm;rate=16000", data: base64 }]);
      }
    };
    if (connected && !client?.isMuted && audioRecorder) { // Use client.isMuted
      audioRecorder.on("data", onData).on("volume", setInVolume).start();
    } else if (audioRecorder) {
      audioRecorder.stop();
    }
    return () => {
      if (audioRecorder) {
        audioRecorder.off("data", onData).off("volume", setInVolume).stop();
      }
    };
  }, [connected, client, audioRecorder, client?.isMuted]); // Dependency on client.isMuted

  // Stop video on disconnect
  useEffect(() => {
    if (!connected && activeVideoStream) {
      console.log('🔌 Disconnected, stopping video stream.');
      activeVideoStream.getTracks().forEach(track => track.stop());
      setActiveVideoStream(null);
      onVideoStreamChange(null);
      setCurrentFacingModeInternal(null);
      setIsSwitchingCamera(false);
      webcam.stop();
      screenCapture.stop();
    }
  }, [connected, activeVideoStream, onVideoStreamChange, webcam, screenCapture]);

  // Video frame sending: sample the <video> into a hidden canvas and send
  // downscaled JPEG frames to the client at roughly 0.5 FPS.
  useEffect(() => {
    let timeoutId = -1;
    function sendVideoFrame() {
      if (connected && activeVideoStream) {
        timeoutId = window.setTimeout(sendVideoFrame, 1000 / 0.5); // Target 0.5 FPS for video
      }
      const video = videoRef.current; const canvas = renderCanvasRef.current;
      if (!video || !canvas || video.readyState < video.HAVE_METADATA || video.paused || video.ended || !client) return;
      try {
        const ctx = canvas.getContext("2d"); if (!ctx) return;
        const scale = 0.25; canvas.width = video.videoWidth * scale; canvas.height = video.videoHeight * scale;
        if (canvas.width > 0 && canvas.height > 0) {
          // Flip image if user-facing camera and it's not already flipped by CSS
          // The new HTML uses scale-x-[-1] so browser handles mirroring.
          // If browser doesn't mirror stream itself, and you need to send mirrored frames:
          // if (currentFacingModeInternal === 'user') {
          //   ctx.scale(-1, 1);
          //   ctx.drawImage(video, -canvas.width, 0, canvas.width, canvas.height);
          // } else {
          //   ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
          // }
          ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
          const base64 = canvas.toDataURL("image/jpeg", 0.8);
          const data = base64.slice(base64.indexOf(",") + 1);
          client.sendRealtimeInput([{ mimeType: "image/jpeg", data }]);
        }
      } catch (error) { console.error("❌ Error processing video frame:", error); }
    }
    if (connected && activeVideoStream && videoRef.current) {
      // BUGFIX: capture the initial timer id so the cleanup below can cancel
      // it. Previously the id was discarded, so a frame scheduled here could
      // still fire after disconnect/unmount.
      timeoutId = window.setTimeout(sendVideoFrame, 200);
    }
    return () => { clearTimeout(timeoutId); };
  }, [connected, activeVideoStream, client, videoRef, currentFacingModeInternal]); // Added currentFacingModeInternal

  // Assign stream to video element
  useEffect(() => {
    if (videoRef.current) {
      if (videoRef.current.srcObject !== activeVideoStream) {
        videoRef.current.srcObject = activeVideoStream;
        if (activeVideoStream) { videoRef.current.play().catch(e => console.warn("Video play failed:", e)); }
      }
    }
  }, [activeVideoStream, videoRef]);

  // If UI is hidden, render minimal or nothing
  if (isUiHidden) {
    return (
      <>
        <canvas style={{ display: "none" }} ref={renderCanvasRef} />
        {/* AudioPulse might still be useful for the global --volume CSS var */}
        <div style={{display: 'none'}}>
          <AudioPulse volume={volume} active={connected && !client?.isMuted} hover={false} />
        </div>
      </>
    );
  }

  // --- Original UI (fallback if isUiHidden is false) ---
  // This part will be shown if you don't pass `isUiHidden={true}` from App.tsx
  // For your new design, this will likely not be rendered.
  return (
    <section className="control-tray">
      <canvas style={{ display: "none" }} ref={renderCanvasRef} />
      <nav className={cn("actions-nav", { disabled: !connected })}>
        <button
          className={cn("action-button mic-button")}
          onClick={() => contextSetMuted(!client?.isMuted)} // Toggle mute state
          disabled={!connected || isSwitchingCamera}
          title={client?.isMuted ? "Unmute Microphone" : "Mute Microphone"}
        >
          <span className="material-symbols-outlined filled">
            {client?.isMuted ? "mic_off" : "mic"}
          </span>
        </button>
        <div className="action-button no-action outlined">
          <AudioPulse volume={volume} active={connected && !client?.isMuted} hover={false} />
        </div>
        {/* ... other original buttons ... This part needs careful review if you want to mix UIs */}
      </nav>
      <div className={cn("connection-container", { connected })}>
        <div className="connection-button-container">
          <button
            ref={connectButtonRef}
            className={cn("action-button connect-toggle", { connected })}
            onClick={async () => {
              if (isSwitchingCamera) return;
              try {
                if (connected) { await disconnect(); } else { await connect(); }
              } catch (err) { console.error('❌ Connection/Disconnection error:', err); }
            }}
            disabled={isSwitchingCamera}
            title={connected ? "Disconnect Stream" : "Connect Stream"}
          >
            <span className="material-symbols-outlined filled">{connected ? "pause" : "play_arrow"}</span>
          </button>
        </div>
        <span className="text-indicator">{connected ? "Streaming" : "Paused"}</span>
      </div>
      {children}
    </section>
  );
}
export default memo(ControlTray);