michon committed on
Commit
866a950
·
1 Parent(s): 08943e9

chat history try 4

avatar-frontend/app/page.tsx CHANGED
@@ -7,11 +7,11 @@ import { flushSync } from "react-dom";
7
  import * as THREE from "three";
8
 
9
  /* -------------------- CONSTANTS & TYPES -------------------- */
10
- const DEFAULT_AVATAR = ""; // Avaturn only
11
 
12
  const getWebSocketURL = () => {
13
  if (typeof window === "undefined") return "ws://localhost:8000/ws";
14
- const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
15
  return `${protocol}//${window.location.host}/ws`;
16
  };
17
  const BACKEND_WS = getWebSocketURL();
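Note: the helper above chooses wss: when the page is served over HTTPS and ws: otherwise; a minimal standalone sketch of that selection (the wsUrlFor name is illustrative):

// Sketch only: pick the WebSocket scheme that matches the page's scheme,
// with the same SSR fallback used above.
function wsUrlFor(path: string): string {
  if (typeof window === "undefined") return `ws://localhost:8000${path}`;
  const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
  return `${protocol}//${window.location.host}${path}`;
}

// e.g. const BACKEND_WS = wsUrlFor("/ws");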
@@ -28,8 +28,6 @@ interface Message {
28
 
29
  /* -------------------- DYNAMIC CDN IMPORT -------------------- */
30
  async function importFromCdn(url: string) {
31
- // avoid bundler resolution at build-time
32
- // eslint-disable-next-line no-eval
33
  return (0, eval)(`import(${JSON.stringify(url)})`);
34
  }
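Note: importFromCdn wraps a dynamic import() in an indirect eval so the bundler does not resolve the CDN URL at build time; a hedged sketch of that pattern and of one way it might be consumed (the cdnUrl parameter and loadAvaturnSdk name are placeholders, not values taken from this commit):

// Sketch only: load an ES module from a CDN at runtime. The indirect eval keeps
// webpack/Next.js from trying to resolve the URL statically.
async function importFromCdn(url: string): Promise<any> {
  return (0, eval)(`import(${JSON.stringify(url)})`);
}

// Illustrative usage, assuming the module exposes an AvaturnSDK export.
async function loadAvaturnSdk(cdnUrl: string): Promise<any> {
  const mod: any = await importFromCdn(cdnUrl);
  if (!mod?.AvaturnSDK) throw new Error("AvaturnSDK not found in module");
  return new mod.AvaturnSDK();
}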
35
 
@@ -56,7 +54,7 @@ async function toBlobObjectUrlViaProxy(httpUrl: string) {
56
  return URL.createObjectURL(blob);
57
  }
58
 
59
- /* -------------------- AVATURN MODAL (CDN SDK) - FIXED -------------------- */
60
  function AvaturnModal({
61
  open,
62
  onClose,
@@ -69,7 +67,6 @@ function AvaturnModal({
69
  subdomain?: string;
70
  }) {
71
  const containerRef = React.useRef<HTMLDivElement | null>(null);
72
- const [initError, setInitError] = useState<string | null>(null);
73
  const [sdkStatus, setSdkStatus] = useState<string>("Loading...");
74
  const sdkRef = useRef<any>(null);
75
  const onExportRef = useRef(onExport);
@@ -83,8 +80,7 @@ function AvaturnModal({
83
  let cancelled = false;
84
 
85
  (async () => {
86
- try {
87
- console.log('[Avaturn] Starting SDK initialization...');
88
  setSdkStatus("Loading SDK...");
89
 
90
  const mod: any = await importFromCdn(
@@ -92,30 +88,22 @@ function AvaturnModal({
92
  );
93
 
94
  if (cancelled) return;
95
- console.log('[Avaturn] SDK module loaded:', mod);
96
 
97
  const AvaturnSDK = mod?.AvaturnSDK;
98
  if (!AvaturnSDK) {
99
- throw new Error("AvaturnSDK not found in module");
100
  }
101
 
102
- console.log('[Avaturn] Creating SDK instance...');
103
  const sdk = new AvaturnSDK();
104
  sdkRef.current = sdk;
105
 
106
- setSdkStatus("Initializing Avaturn...");
107
  const url = `https://${subdomain}.avaturn.dev`;
108
- console.log('[Avaturn] Init URL:', url);
109
 
110
  await sdk.init(containerRef.current!, { url });
111
-
112
- console.log('[Avaturn] SDK initialized successfully!');
113
- setSdkStatus("Ready! Customize your avatar");
114
 
115
  sdk.on("export", (data: any) => {
116
- console.log('[Avaturn] ✅ EXPORT EVENT FIRED!');
117
- console.log('[Avaturn] Export data:', data);
118
-
119
  const glb =
120
  data?.links?.glb?.url ||
121
  data?.links?.glb ||
@@ -126,24 +114,14 @@ function AvaturnModal({
126
  data?.data?.glb?.url ||
127
  data?.data?.glb;
128
 
129
- console.log('[Avaturn] Extracted GLB URL:', glb);
130
-
131
  if (glb) {
132
  onExportRef.current(glb);
133
- } else {
134
- console.error('[Avaturn] No GLB URL found in export data!');
135
- alert('Export succeeded but no GLB URL found. Check console for data.');
136
  }
137
  });
138
 
139
- sdk.on("*", (eventName: string, data: any) => {
140
- console.log('[Avaturn] Event:', eventName, data);
141
- });
142
-
143
  } catch (e: any) {
144
- console.error("[Avaturn] Init error:", e);
145
- setInitError(e?.message || "Failed to init");
146
- setSdkStatus("Error loading Avaturn");
147
  }
148
  })();
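Note: the export handler above probes several payload shapes for a GLB URL; a hedged sketch of that lookup as a standalone helper (only the shapes visible in this diff are reproduced; the elided intermediate fallbacks are not guessed at):

// Sketch only: return the first GLB URL found in an Avaturn export payload, or null.
function extractGlbUrl(data: any): string | null {
  return (
    data?.links?.glb?.url ||
    data?.links?.glb ||
    data?.data?.glb?.url ||
    data?.data?.glb ||
    null
  );
}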
149
 
@@ -151,12 +129,8 @@ function AvaturnModal({
151
  cancelled = true;
152
  if (sdkRef.current) {
153
  try {
154
- console.log('[Avaturn] Destroying SDK...');
155
  sdkRef.current.destroy?.();
156
- sdkRef.current = null;
157
- } catch (e) {
158
- console.error('[Avaturn] Destroy error:', e);
159
- }
160
  }
161
  };
162
  }, [open, subdomain]);
@@ -166,10 +140,7 @@ function AvaturnModal({
166
  return (
167
  <div style={{ position: "fixed", inset: 0, zIndex: 1000, background: "#000" }}>
168
  <button
169
- onClick={() => {
170
- console.log('[Avaturn] Close button clicked');
171
- onClose();
172
- }}
173
  style={{
174
  position: "absolute",
175
  top: 20,
@@ -183,57 +154,28 @@ function AvaturnModal({
183
  borderRadius: 10,
184
  cursor: "pointer",
185
  fontSize: 24,
186
- fontWeight: 700,
187
- display: "flex",
188
- alignItems: "center",
189
- justifyContent: "center",
190
  }}
191
  >
192
  ✕
193
  </button>
194
- <div
195
- style={{
196
- position: "absolute",
197
- top: 20,
198
- left: 20,
199
- zIndex: 1002,
200
- background: "rgba(0,200,255,.95)",
201
- color: "#000",
202
- padding: "12px 18px",
203
- borderRadius: 10,
204
- fontSize: 14,
205
- fontWeight: 700,
206
- }}
207
- >
208
  {sdkStatus}
209
  </div>
210
- {initError && (
211
- <div
212
- style={{
213
- position: "absolute",
214
- top: 80,
215
- left: 20,
216
- zIndex: 1002,
217
- background: "rgba(255,0,0,.9)",
218
- color: "#fff",
219
- padding: "10px 14px",
220
- borderRadius: 8,
221
- fontSize: 13,
222
- maxWidth: 400,
223
- }}
224
- >
225
- ❌ {initError}
226
- <br />
227
- <small>Check console for details</small>
228
- </div>
229
- )}
230
  <div
231
  id="avaturn-sdk-container"
232
  ref={containerRef}
233
  style={{
234
  width: "100%",
235
  height: "100%",
236
- border: "none",
237
  background: "#1a1a1a",
238
  }}
239
  />
@@ -249,20 +191,9 @@ function Avatar({ liveBlend, avatarUrl, position, rotation, scale }: {
249
  rotation: [number, number, number];
250
  scale: number;
251
  }) {
252
- console.log('[Avatar Component] Mounting, URL length:', avatarUrl.length);
253
- console.log('[Avatar Component] URL type:', avatarUrl.substring(0, 30) + '...');
254
-
255
  const gltf = useGLTF(avatarUrl) as any;
256
  const { scene, animations } = gltf;
257
-
258
- // Also load the idle animation separately (for T-pose avatars)
259
  const idleAnimGLTF = useGLTF('/idle-animation.glb') as any;
260
-
261
- console.log('[Avatar Component] useGLTF completed, scene loaded');
262
- console.log('[Avatar Component] Avatar animations:', animations?.length || 0);
263
- console.log('[Avatar Component] Idle animation loaded:', idleAnimGLTF.animations?.length || 0);
264
-
265
- // Animation mixer for skeletal animations
266
  const mixerRef = useRef<THREE.AnimationMixer | null>(null);
267
 
268
  const morphMeshes = useMemo(() => {
@@ -279,11 +210,9 @@ function Avatar({ liveBlend, avatarUrl, position, rotation, scale }: {
279
  if (o.morphTargetDictionary && o.morphTargetInfluences) arr.push(o);
280
  });
281
 
282
- // Setup animation mixer
283
  const mixer = new THREE.AnimationMixer(scene);
284
  mixerRef.current = mixer;
285
 
286
- // Use avatar's own animation if it has one, otherwise use idle animation
287
  const animToPlay = (animations && animations.length > 0)
288
  ? animations[0]
289
  : (idleAnimGLTF.animations && idleAnimGLTF.animations.length > 0)
@@ -293,21 +222,16 @@ function Avatar({ liveBlend, avatarUrl, position, rotation, scale }: {
293
  if (animToPlay) {
294
  const action = mixer.clipAction(animToPlay);
295
  action.play();
296
- console.log('[Avatar] Playing animation:', animToPlay.name);
297
- } else {
298
- console.warn('[Avatar] No animation available');
299
  }
300
 
301
  return arr;
302
  }, [scene, animations, idleAnimGLTF.animations]);
303
 
304
  useFrame((_, dt) => {
305
- // Update skeletal animation
306
  if (mixerRef.current) {
307
  mixerRef.current.update(dt);
308
  }
309
 
310
- // Update morph targets (lip sync) with stronger influence
311
  morphMeshes.forEach((m) => {
312
  const dict = m.morphTargetDictionary as Record<string, number>;
313
  const infl = m.morphTargetInfluences as number[];
@@ -315,7 +239,6 @@ function Avatar({ liveBlend, avatarUrl, position, rotation, scale }: {
315
  const i = dict[name];
316
  if (i === undefined) return;
317
  const cur = infl[i] ?? 0;
318
- // Faster interpolation for more responsive lip sync
319
  infl[i] = cur + (target - cur) * Math.min(1, dt * 25);
320
  });
321
  Object.values(dict).forEach((idx) => {
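Note: the lip-sync loop eases each morph-target influence toward its viseme weight with a frame-rate-aware step; a minimal sketch of that interpolation in isolation (the function name is illustrative):

// Sketch only: move the current influence toward the target viseme weight,
// scaling the step by delta time and capping it at 1.
function easeMorphInfluence(current: number, target: number, dt: number, speed = 25): number {
  return current + (target - current) * Math.min(1, dt * speed);
}

// e.g. inside useFrame((_, dt) => { ... }):
//   infl[i] = easeMorphInfluence(infl[i] ?? 0, weight, dt);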
@@ -329,23 +252,114 @@ function Avatar({ liveBlend, avatarUrl, position, rotation, scale }: {
329
  return <primitive object={scene} position={position} rotation={rotation} scale={scale} />;
330
  }
331
 
332
- /* -------------------- SMALL UI -------------------- */
333
- function GlowingOrb({ isActive }: { isActive: boolean }) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
334
  return (
335
- <div className="absolute top-1/2 left-1/2 transform -translate-x-1/2 -translate-y-1/2 pointer-events-none">
336
- <div
337
- className={`relative w-64 h-64 transition-all duration-1000 ${
338
- isActive ? "scale-100 opacity-100" : "scale-0 opacity-0"
339
- }`}
340
- >
341
- <div className="absolute inset-0 rounded-full bg-gradient-to-r from-blue-500/30 via-purple-500/30 to-pink-500/30 blur-3xl animate-pulse-slow" />
342
- <div className="absolute inset-4 rounded-full bg-gradient-to-r from-blue-400/40 via-purple-400/40 to-pink-400/40 blur-2xl animate-pulse-slower" />
343
- <div className="absolute inset-8 rounded-full bg-gradient-to-br from-blue-500/50 via-purple-500/50 to-pink-500/50 backdrop-blur-xl border border-white/20 shadow-2xl animate-breathe" />
344
- <div className="absolute inset-12 rounded-full bg-gradient-to-tr from-white/30 to-transparent animate-spin-very-slow" />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
345
  </div>
346
  </div>
347
  );
348
  }
 
 
349
  function MessageBubble({ message }: { message: Message }) {
350
  const isUser = message.role === "user";
351
  return (
@@ -368,34 +382,15 @@ function MessageBubble({ message }: { message: Message }) {
368
  </div>
369
  );
370
  }
371
- function MinimalButton({
372
- icon,
373
- onClick,
374
- label,
375
- }: {
376
- icon: React.ReactNode;
377
- onClick: () => void;
378
- label?: string;
379
- }) {
380
- return (
381
- <button onClick={onClick} className="relative group transition-all duration-300 hover:scale-110 active:scale-90">
382
- <div className="w-12 h-12 rounded-full backdrop-blur-xl bg-white/10 border border-white/20 shadow-lg flex items-center justify-center hover:bg-white/20 transition-all duration-300">
383
- {icon}
384
- </div>
385
- {label && (
386
- <div className="absolute bottom-full mb-2 left-1/2 transform -translate-x-1/2 opacity-0 group-hover:opacity-100 transition-opacity duration-200 pointer-events-none whitespace-nowrap">
387
- <div className="bg-black/80 backdrop-blur-xl text-white text-xs px-3 py-1.5 rounded-lg">
388
- {label}
389
- </div>
390
- </div>
391
- )}
392
- </button>
393
- );
394
- }
395
 
396
  /* -------------------- PAGE -------------------- */
397
  export default function Page() {
398
- const [status, setStatus] = useState("What can I do for you today?");
 
 
 
 
 
399
  const [faceEmotion, setFaceEmotion] = useState("Neutral");
400
  const [voiceEmotion, setVoiceEmotion] = useState("Neutral");
401
  const [isActive, setIsActive] = useState(false);
@@ -406,12 +401,9 @@ export default function Page() {
406
  const [showSettings, setShowSettings] = useState(false);
407
  const [selectedLanguage, setSelectedLanguage] = useState<"en" | "nl">("en");
408
  const [selectedVoice, setSelectedVoice] = useState<"male" | "female">("female");
409
- const [isSpeechPaused, setIsSpeechPaused] = useState(false);
410
  const [messages, setMessages] = useState<Message[]>([]);
411
  const [volume, setVolume] = useState(0.8);
412
 
413
- // Transform controls for testing
414
- const [showTransformControls, setShowTransformControls] = useState(false);
415
  const [avatarPosition, setAvatarPosition] = useState({ x: -0.01, y: -2.12, z: 0.06 });
416
  const [avatarRotation, setAvatarRotation] = useState({ x: 0.00, y: 0.51, z: 0.00 });
417
  const [avatarScale, setAvatarScale] = useState(1.25);
@@ -420,13 +412,6 @@ export default function Page() {
420
  const [avatarRenderKey, setAvatarRenderKey] = useState(0);
421
  const objectUrlRef = useRef<string | null>(null);
422
  const avatarUrlRef = useRef(DEFAULT_AVATAR);
423
- const lastRemoteUrlRef = useRef<string | null>(null);
424
-
425
- useEffect(() => {
426
- console.log('[Main] Avatar URL changed to:', avatarUrl.substring(0, 60) + '...');
427
- console.log('[Main] Avatar URL length:', avatarUrl.length);
428
- avatarUrlRef.current = avatarUrl;
429
- }, [avatarUrl]);
430
 
431
  const videoRef = useRef<HTMLVideoElement>(null);
432
  const audioRef = useRef<HTMLAudioElement>(null);
@@ -441,94 +426,92 @@ export default function Page() {
441
  const idxRef = useRef(0);
442
  const isPlayingRef = useRef(false);
443
 
 
444
  useEffect(() => {
445
- (async () => {
446
- const saved =
447
- localStorage.getItem("mrrrme_avatar_remote_url") ||
448
- localStorage.getItem("mrrrme_avatar_url");
449
- if (!saved) return;
450
- lastRemoteUrlRef.current = saved;
451
-
452
- try {
453
- if (objectUrlRef.current) {
454
- URL.revokeObjectURL(objectUrlRef.current);
455
- objectUrlRef.current = null;
456
- }
457
- let obj: string | null = null;
458
- if (isDataUrl(saved)) {
459
- obj = await toBlobObjectUrlFromDirect(saved);
460
- } else if (isHttpUrl(saved)) {
461
- try {
462
- obj = await toBlobObjectUrlViaProxy(saved);
463
- } catch {
464
- obj = await toBlobObjectUrlFromDirect(saved);
465
- }
466
- }
467
- if (obj) {
468
- objectUrlRef.current = obj;
469
- setAvatarUrl(obj);
470
- } else {
471
- setAvatarUrl(saved);
472
- }
473
- } catch {
474
- setAvatarUrl(saved);
475
- }
476
- })();
477
-
478
- return () => {
479
- if (objectUrlRef.current) {
480
- URL.revokeObjectURL(objectUrlRef.current);
481
- objectUrlRef.current = null;
482
- }
483
- };
484
  }, []);
485
 
486
- useEffect(() => {
487
- if (audioRef.current) audioRef.current.volume = volume;
488
- }, [volume]);
 
 
 
 
 
 
 
 
 
489
 
490
- useEffect(() => {
491
- if (showHistory) historyEndRef.current?.scrollIntoView({ behavior: "smooth" });
492
- }, [messages, showHistory]);
 
 
 
 
 
 
 
 
 
 
493
 
494
- async function startCapture() {
495
- if (isActive) return;
496
  try {
497
- if (!navigator.mediaDevices?.getUserMedia) throw new Error("Camera and microphone access unavailable.");
498
- const stream = await navigator.mediaDevices.getUserMedia({ video: { width: 640, height: 480 }, audio: true });
499
- if (videoRef.current) {
500
- videoRef.current.srcObject = stream;
501
- await (videoRef.current as HTMLVideoElement).play();
502
- }
503
- connectWebSocket();
504
- startVideoCapture();
505
- startAudioCapture(stream);
506
- startSpeechRecognition();
507
- setIsActive(true);
508
- setStatus("Listening...");
509
- } catch (err) {
510
- console.error("[Frontend] ❌ Error:", err);
511
- setStatus("Camera/microphone access denied");
512
- }
513
- }
 
 
514
 
515
  function connectWebSocket() {
516
  const ws = new WebSocket(BACKEND_WS);
517
  ws.onopen = () => {
518
- setStatus("Connected");
 
519
  wsRef.current = ws;
520
- // Send initial preferences
521
- ws.send(JSON.stringify({
522
- type: "preferences",
523
- voice: selectedVoice,
524
- language: selectedLanguage
525
- }));
526
  };
 
527
  ws.onmessage = async (event) => {
528
  const data = JSON.parse(event.data);
529
- if (data.type === "face_emotion") setFaceEmotion(data.emotion);
530
- else if (data.type === "voice_emotion") setVoiceEmotion(data.emotion);
531
- else if (data.type === "llm_response") {
 
 
 
 
 
 
 
 
 
 
 
532
  setMessages((prev) => [
533
  ...prev,
534
  { id: Date.now().toString(), role: "assistant", content: data.text, timestamp: new Date(), emotion: data.emotion },
@@ -548,8 +531,11 @@ export default function Page() {
548
  setIsAvatarSpeaking(false);
549
  shouldAutoRestartRef.current = true;
550
  }
 
 
551
  }
552
  };
 
553
  ws.onerror = () => setStatus("Connection error");
554
  ws.onclose = () => {
555
  setStatus("Disconnected");
@@ -557,6 +543,29 @@ export default function Page() {
557
  };
558
  }
559
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
560
  function startVideoCapture() {
561
  const canvas = document.createElement("canvas");
562
  const ctx = canvas.getContext("2d");
@@ -596,264 +605,96 @@ export default function Page() {
596
  setStatus("Speech not supported");
597
  return;
598
  }
599
- const isBrave = (navigator as any).brave?.isBrave?.name === "isBrave";
600
- if (isBrave) {
601
- setStatus("Use text input");
602
- return;
603
- }
604
- if (recognitionRef.current) {
605
- try { recognitionRef.current.stop(); } catch {}
606
- recognitionRef.current = null;
607
- }
608
- shouldAutoRestartRef.current = true;
609
 
610
  const recognition = new SpeechRecognition();
611
  recognition.continuous = true;
612
  recognition.interimResults = true;
613
  recognition.lang = selectedLanguage === "nl" ? "nl-NL" : "en-US";
614
- // Improved speech recognition settings
615
- recognition.maxAlternatives = 3; // Get multiple alternatives for better accuracy
616
- recognition.serviceURI = ""; // Use default service
617
 
618
  let finalTranscript = "";
619
- let interimTranscript = "";
620
- let timeoutId: NodeJS.Timeout | null = null;
621
- let silenceTimeoutId: NodeJS.Timeout | null = null;
622
- const SILENCE_TIMEOUT = 2000; // 2 seconds of silence before sending
623
- const MIN_CONFIDENCE = 0.7; // Minimum confidence threshold
624
-
625
- recognition.onstart = () => {
626
- setStatus("Listening...");
627
- finalTranscript = "";
628
- interimTranscript = "";
629
- };
630
 
631
  recognition.onresult = (event: any) => {
632
  if (isAvatarSpeaking) return;
633
 
634
- // Clear silence timeout when we get results
635
- if (silenceTimeoutId) {
636
- clearTimeout(silenceTimeoutId);
637
- silenceTimeoutId = null;
638
- }
639
-
640
- interimTranscript = "";
641
- let hasNewFinal = false;
642
-
643
  for (let i = event.resultIndex; i < event.results.length; i++) {
644
- const result = event.results[i];
645
- const transcript = result[0].transcript;
646
- const confidence = result[0].confidence || 0.5;
647
-
648
- if (result.isFinal) {
649
- // Only accept final results with good confidence
650
- if (confidence >= MIN_CONFIDENCE || transcript.trim().length > 2) {
651
- finalTranscript += transcript + " ";
652
- hasNewFinal = true;
653
- }
654
  } else {
655
- // Show interim results
656
- interimTranscript += transcript;
657
  }
658
  }
659
 
660
- // Update status with current transcript
661
- const displayText = interimTranscript || finalTranscript.trim();
662
- if (displayText) {
663
- setStatus(displayText.slice(-60)); // Show last 60 chars
664
- }
665
-
666
- // Handle final transcript
667
- if (hasNewFinal && finalTranscript.trim()) {
668
- // Clear any existing timeout
669
- if (timeoutId) clearTimeout(timeoutId);
670
-
671
- // Set a shorter timeout for final results (they're more reliable)
672
- timeoutId = setTimeout(() => {
673
- const textToSend = finalTranscript.trim();
674
- if (textToSend && wsRef.current && !isAvatarSpeaking && textToSend.length >= 2) {
675
- setMessages((prev) => [
676
- ...prev,
677
- { id: Date.now().toString(), role: "user", content: textToSend, timestamp: new Date() },
678
- ]);
679
- wsRef.current.send(JSON.stringify({
680
- type: "speech_end",
681
- text: textToSend,
682
- voice: selectedVoice
683
- }));
684
- finalTranscript = "";
685
- interimTranscript = "";
686
- setStatus("Processing...");
687
- }
688
- }, 800); // Shorter timeout for final results
689
- } else if (interimTranscript.trim() && !hasNewFinal) {
690
- // If we have interim results but no final, wait for silence
691
- if (silenceTimeoutId) clearTimeout(silenceTimeoutId);
692
- silenceTimeoutId = setTimeout(() => {
693
- const textToSend = interimTranscript.trim() || finalTranscript.trim();
694
- if (textToSend && wsRef.current && !isAvatarSpeaking && textToSend.length >= 3) {
695
- setMessages((prev) => [
696
- ...prev,
697
- { id: Date.now().toString(), role: "user", content: textToSend, timestamp: new Date() },
698
- ]);
699
- wsRef.current.send(JSON.stringify({
700
- type: "speech_end",
701
- text: textToSend,
702
- voice: selectedVoice
703
- }));
704
- finalTranscript = "";
705
- interimTranscript = "";
706
- setStatus("Processing...");
707
- }
708
- }, SILENCE_TIMEOUT);
709
  }
710
  };
711
 
712
  recognition.onerror = (event: any) => {
713
- console.log("[Speech Recognition] Error:", event.error);
714
- if (event.error === "no-speech") {
715
- // Don't show error for no-speech, just keep listening
716
- return;
717
- } else if (event.error === "not-allowed") {
718
- setStatus("Microphone permission denied");
719
- shouldAutoRestartRef.current = false;
720
- } else if (event.error === "audio-capture") {
721
- setStatus("No microphone found");
722
- shouldAutoRestartRef.current = false;
723
- } else if (event.error === "network") {
724
- setStatus("Network error - retrying...");
725
- // Will auto-restart
726
- } else if (event.error === "aborted") {
727
- // User or system aborted, don't restart
728
- shouldAutoRestartRef.current = false;
729
- } else {
730
  setStatus(`Speech error: ${event.error}`);
731
  }
732
  };
733
 
734
  recognition.onend = () => {
735
- // Clear timeouts
736
- if (timeoutId) clearTimeout(timeoutId);
737
- if (silenceTimeoutId) clearTimeout(silenceTimeoutId);
738
-
739
- // Auto-restart if we should
740
- if (shouldAutoRestartRef.current && recognitionRef.current === recognition && !isSpeechPaused) {
741
- setTimeout(() => {
742
- try {
743
- recognition.start();
744
- setStatus("Listening...");
745
- } catch (e) {
746
- console.log("[Speech Recognition] Restart failed:", e);
747
- // Try again after a longer delay
748
- setTimeout(() => {
749
- try {
750
- recognition.start();
751
- setStatus("Listening...");
752
- } catch {}
753
- }, 500);
754
- }
755
- }, 100);
756
  }
757
  };
758
 
759
- try {
760
- recognition.start();
761
- recognitionRef.current = recognition;
762
- } catch (e) {
763
- console.error("[Speech Recognition] Start failed:", e);
764
- setStatus("Speech recognition failed to start");
765
- }
766
  }
767
 
768
  async function playAvatarResponse(data: any) {
769
- if (!audioRef.current) {
770
- setIsAvatarSpeaking(false);
771
- shouldAutoRestartRef.current = true;
772
- return;
773
- }
774
  audioRef.current.pause();
775
- audioRef.current.currentTime = 0;
776
-
777
  isPlayingRef.current = false;
778
- idxRef.current = 0;
779
  setLiveBlend({});
780
  visemesRef.current = (data.visemes as Viseme[]).sort((a, b) => a.t - b.t);
781
 
782
  const protocol = window.location.protocol;
783
  const host = window.location.host;
784
- const audioPath = data.audio_url;
785
- const url = audioPath.startsWith("http") ? audioPath : `${protocol}//${host}${audioPath}`;
 
786
  audioRef.current.src = url;
787
 
788
  try {
789
- await Promise.race([
790
- new Promise((resolve, reject) => {
791
- if (!audioRef.current) return reject("No audio");
792
- audioRef.current.oncanplaythrough = () => resolve(true);
793
- audioRef.current.onerror = () => reject(new Error("Load failed"));
794
- audioRef.current.load();
795
- }),
796
- new Promise((_, reject) => setTimeout(() => reject(new Error("Timeout")), 5000)),
797
- ]);
798
 
799
  isPlayingRef.current = true;
800
- try {
801
- await audioRef.current.play();
802
- } catch {
803
- setStatus("πŸ”Š Tap to hear");
804
- await new Promise((resolve) => {
805
- let done = false;
806
- const tap = async () => {
807
- if (done) return;
808
- try {
809
- await audioRef.current?.play();
810
- done = true;
811
- setStatus("Speaking...");
812
- document.removeEventListener("click", tap);
813
- document.removeEventListener("touchstart", tap);
814
- resolve(true);
815
- } catch {}
816
- };
817
- document.addEventListener("click", tap);
818
- document.addEventListener("touchstart", tap);
819
- setTimeout(() => {
820
- if (!done) {
821
- document.removeEventListener("click", tap);
822
- document.removeEventListener("touchstart", tap);
823
- done = true;
824
- resolve(false);
825
- }
826
- }, 30000);
827
- });
828
- }
829
 
830
  await new Promise((resolve) => {
831
  if (!audioRef.current) return resolve(true);
832
- const audio = audioRef.current;
833
- audio.onended = () => resolve(true);
834
- const duration =
835
- visemesRef.current.length > 0
836
- ? visemesRef.current[visemesRef.current.length - 1].t + 1
837
- : 5;
838
- setTimeout(() => resolve(true), duration * 1000 + 1000);
839
  });
840
  } catch {
841
  setStatus("Audio error");
842
- isPlayingRef.current = false;
843
- setIsAvatarSpeaking(false);
844
- shouldAutoRestartRef.current = true;
845
- if (recognitionRef.current) {
846
- try { recognitionRef.current.start(); } catch {}
847
- }
848
- return;
849
  }
850
 
851
  setIsAvatarSpeaking(false);
852
  setStatus("Listening...");
853
- shouldAutoRestartRef.current = true;
854
  isPlayingRef.current = false;
855
  setLiveBlend({});
856
- await new Promise((r) => setTimeout(r, 800));
 
857
  if (recognitionRef.current) {
858
  try { recognitionRef.current.start(); } catch {}
859
  }
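Note: the removed audio branch raced the canplaythrough event against a 5-second timeout before attempting playback; a minimal sketch of that pattern, assuming an HTMLAudioElement (the helper name is illustrative):

// Sketch only: resolve once the element can play through, or reject after timeoutMs.
async function waitUntilPlayable(audio: HTMLAudioElement, timeoutMs = 5000): Promise<void> {
  await Promise.race([
    new Promise<void>((resolve, reject) => {
      audio.oncanplaythrough = () => resolve();
      audio.onerror = () => reject(new Error("Load failed"));
      audio.load();
    }),
    new Promise<void>((_, reject) => setTimeout(() => reject(new Error("Timeout")), timeoutMs)),
  ]);
}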
@@ -866,16 +707,9 @@ export default function Page() {
866
  if (a && visemesRef.current.length > 0 && isPlayingRef.current) {
867
  const t = a.currentTime;
868
  while (idxRef.current < visemesRef.current.length && visemesRef.current[idxRef.current].t <= t + 0.02) {
869
- const v = visemesRef.current[idxRef.current];
870
- setLiveBlend(v.blend);
871
  idxRef.current++;
872
  }
873
- if (a.ended) {
874
- setLiveBlend({});
875
- setStatus("Listening...");
876
- isPlayingRef.current = false;
877
- setIsAvatarSpeaking(false);
878
- }
879
  }
880
  raf = requestAnimationFrame(tick);
881
  };
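Note: the render-loop tick consumes time-sorted visemes by advancing an index while each cue's timestamp is within roughly 20 ms of the audio clock; a standalone sketch of that scheduling (types and names are illustrative):

// Sketch only: apply every cue whose timestamp falls within the lookahead window
// of the audio clock, returning the new read index so the caller can keep it in a ref.
interface VisemeCue { t: number; blend: Record<string, number>; }

function consumeVisemes(
  cues: VisemeCue[],
  index: number,
  audioTime: number,
  apply: (blend: Record<string, number>) => void,
  lookahead = 0.02,
): number {
  while (index < cues.length && cues[index].t <= audioTime + lookahead) {
    apply(cues[index].blend);
    index++;
  }
  return index;
}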
@@ -883,27 +717,7 @@ export default function Page() {
883
  return () => cancelAnimationFrame(raf);
884
  }, []);
885
 
886
- useEffect(() => {
887
- return () => {
888
- if (wsRef.current) wsRef.current.close();
889
- if (mediaRecorderRef.current) mediaRecorderRef.current.stop();
890
- if (recognitionRef.current) {
891
- try { recognitionRef.current.stop(); recognitionRef.current = null; } catch {}
892
- }
893
- if (videoRef.current?.srcObject) {
894
- const stream = videoRef.current.srcObject as MediaStream;
895
- stream?.getTracks().forEach((t) => t.stop());
896
- }
897
- if (objectUrlRef.current) {
898
- URL.revokeObjectURL(objectUrlRef.current);
899
- objectUrlRef.current = null;
900
- }
901
- };
902
- }, []);
903
-
904
  const handleAvaturnExport = useCallback(async (remoteUrl: string) => {
905
- console.log('[Avatar] Handling export, URL type:', remoteUrl.substring(0, 50));
906
-
907
  const oldUrl = avatarUrlRef.current;
908
  try {
909
  if (oldUrl !== DEFAULT_AVATAR) {
@@ -911,176 +725,107 @@ export default function Page() {
911
  }
912
  } catch {}
913
 
914
- if (objectUrlRef.current && objectUrlRef.current !== remoteUrl) {
915
  URL.revokeObjectURL(objectUrlRef.current);
916
  objectUrlRef.current = null;
917
  }
918
 
919
- lastRemoteUrlRef.current = remoteUrl;
920
- setStatus("Loading new avatar…");
921
-
922
- try {
923
- if (isDataUrl(remoteUrl)) {
924
- console.log('[Avatar] Using data URL directly (no localStorage)');
925
-
926
- console.log('[Avatar] Closing modal...');
927
- setShowAvatarCreator(false);
928
-
929
- await new Promise(resolve => setTimeout(resolve, 300));
930
-
931
- console.log('[Avatar] Now setting avatar URL to data URL');
932
- flushSync(() => {
933
- setAvatarUrl(remoteUrl);
934
- setAvatarRenderKey(prev => prev + 1);
935
- });
936
- avatarUrlRef.current = remoteUrl;
937
- console.log('[Avatar] State updates flushed, avatarRenderKey incremented');
938
-
939
- localStorage.removeItem("mrrrme_avatar_remote_url");
940
- localStorage.removeItem("mrrrme_avatar_url");
941
- setStatus("Avatar updated! ✨");
942
- // Show config screen if we haven't configured before
943
- const hasConfiguredBefore = localStorage.getItem("mrrrme_has_configured");
944
- if (!hasConfiguredBefore) {
945
- setTimeout(() => {
946
- setStatus("What can I do for you today?");
947
- setShowConfigScreen(true);
948
- }, 2500);
949
- } else {
950
- setTimeout(() => {
951
- setStatus("What can I do for you today?");
952
- }, 2500);
953
- }
954
- return;
955
- }
956
-
957
- let objUrl: string | null = null;
958
-
959
- if (isHttpUrl(remoteUrl)) {
960
- try {
961
- objUrl = await toBlobObjectUrlViaProxy(remoteUrl);
962
- } catch {
963
- objUrl = await toBlobObjectUrlFromDirect(remoteUrl);
964
- }
965
- }
966
-
967
- if (objUrl) {
968
- objectUrlRef.current = objUrl;
969
- setAvatarUrl(objUrl);
970
- } else {
971
- setAvatarUrl(remoteUrl);
972
- }
973
-
974
- localStorage.setItem("mrrrme_avatar_remote_url", remoteUrl);
975
- localStorage.setItem("mrrrme_avatar_url", remoteUrl);
976
- setStatus("Avatar updated! ✨");
977
- } catch (e) {
978
- console.error("[Avatar] load failed:", e);
979
  setAvatarUrl(remoteUrl);
980
- setStatus("Avatar updated.");
981
- }
982
-
983
- // Show config screen if we haven't configured before
984
- const hasConfiguredBefore = localStorage.getItem("mrrrme_has_configured");
985
- if (!hasConfiguredBefore) {
986
- setTimeout(() => {
987
- setStatus("What can I do for you today?");
988
- setShowConfigScreen(true);
989
- }, 2500);
990
- } else {
991
- setTimeout(() => {
992
- setStatus("What can I do for you today?");
993
- }, 2500);
994
- }
995
  }, []);
996
 
 
 
 
 
997
  return (
998
  <div className="relative w-screen h-screen bg-gradient-to-br from-slate-950 via-purple-950 to-slate-900 overflow-hidden">
999
  <div className="absolute top-6 left-6 z-30">
1000
- <div className="text-white font-bold">MrrrMe</div>
1001
  <div className="text-white/60 text-sm">{status}</div>
1002
  </div>
1003
 
1004
- <GlowingOrb isActive={isActive && isAvatarSpeaking} />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1005
 
1006
- {/* Settings Modal - can be opened anytime */}
1007
  {showSettings && (
1008
  <>
1009
- <div className="absolute inset-0 bg-black/60 backdrop-blur-sm z-40 animate-fade-in" onClick={() => setShowSettings(false)} />
1010
- <div className="absolute inset-0 flex flex-col items-center justify-center z-50 animate-fade-in">
1011
- <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-3xl p-8 md:p-12 max-w-md w-full mx-4 shadow-2xl relative">
1012
- <button
1013
- onClick={() => setShowSettings(false)}
1014
- className="absolute top-4 right-4 w-10 h-10 bg-white/10 rounded-xl flex items-center justify-center hover:bg-white/20 transition-all hover:rotate-90 duration-300"
1015
- >
1016
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1017
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
1018
- </svg>
1019
- </button>
1020
 
1021
- <h2 className="text-white text-3xl md:text-4xl font-bold mb-8 text-center">
1022
- Settings
1023
- </h2>
1024
-
1025
- <div className="space-y-6 mb-8">
1026
- {/* Language Selection */}
1027
  <div>
1028
- <label className="text-white/90 text-lg font-semibold mb-3 block">
1029
- Language / Taal
1030
- </label>
1031
  <div className="flex gap-3">
1032
  <button
1033
- onClick={() => {
1034
- setSelectedLanguage("en");
1035
- if (recognitionRef.current) {
1036
- try {
1037
- recognitionRef.current.stop();
1038
- recognitionRef.current.lang = "en-US";
1039
- if (!isSpeechPaused) {
1040
- recognitionRef.current.start();
1041
- }
1042
- } catch {}
1043
- }
1044
- if (wsRef.current) {
1045
- wsRef.current.send(JSON.stringify({
1046
- type: "preferences",
1047
- voice: selectedVoice,
1048
- language: "en"
1049
- }));
1050
- }
1051
- }}
1052
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1053
  selectedLanguage === "en"
1054
- ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white shadow-lg scale-105"
1055
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1056
  }`}
1057
  >
1058
  English
1059
  </button>
1060
  <button
1061
- onClick={() => {
1062
- setSelectedLanguage("nl");
1063
- if (recognitionRef.current) {
1064
- try {
1065
- recognitionRef.current.stop();
1066
- recognitionRef.current.lang = "nl-NL";
1067
- if (!isSpeechPaused) {
1068
- recognitionRef.current.start();
1069
- }
1070
- } catch {}
1071
- }
1072
- if (wsRef.current) {
1073
- wsRef.current.send(JSON.stringify({
1074
- type: "preferences",
1075
- voice: selectedVoice,
1076
- language: "nl"
1077
- }));
1078
- }
1079
- }}
1080
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1081
  selectedLanguage === "nl"
1082
- ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white shadow-lg scale-105"
1083
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1084
  }`}
1085
  >
1086
  Nederlands
@@ -1088,46 +833,25 @@ export default function Page() {
1088
  </div>
1089
  </div>
1090
 
1091
- {/* Voice Selection */}
1092
  <div>
1093
- <label className="text-white/90 text-lg font-semibold mb-3 block">
1094
- Voice
1095
- </label>
1096
  <div className="flex gap-3">
1097
  <button
1098
- onClick={() => {
1099
- setSelectedVoice("female");
1100
- if (wsRef.current) {
1101
- wsRef.current.send(JSON.stringify({
1102
- type: "preferences",
1103
- voice: "female",
1104
- language: selectedLanguage
1105
- }));
1106
- }
1107
- }}
1108
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1109
  selectedVoice === "female"
1110
- ? "bg-gradient-to-r from-pink-500 to-rose-600 text-white shadow-lg scale-105"
1111
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1112
  }`}
1113
  >
1114
  Female
1115
  </button>
1116
  <button
1117
- onClick={() => {
1118
- setSelectedVoice("male");
1119
- if (wsRef.current) {
1120
- wsRef.current.send(JSON.stringify({
1121
- type: "preferences",
1122
- voice: "male",
1123
- language: selectedLanguage
1124
- }));
1125
- }
1126
- }}
1127
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1128
  selectedVoice === "male"
1129
- ? "bg-gradient-to-r from-blue-500 to-cyan-600 text-white shadow-lg scale-105"
1130
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1131
  }`}
1132
  >
1133
  Male
@@ -1138,377 +862,103 @@ export default function Page() {
1138
 
1139
  <button
1140
  onClick={() => setShowSettings(false)}
1141
- className="w-full group relative transition-all duration-300 hover:scale-105 active:scale-95"
1142
  >
1143
- <div className="absolute inset-0 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full blur-xl opacity-50 group-hover:opacity-75 transition-opacity duration-300" />
1144
- <div className="relative px-8 py-4 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full shadow-2xl border border-white/20 flex items-center justify-center gap-3">
1145
- <span className="text-xl font-semibold text-white">Done</span>
1146
- </div>
1147
  </button>
1148
  </div>
1149
  </div>
1150
  </>
1151
  )}
1152
 
1153
- {showConfigScreen && !isActive && !showAvatarCreator && (
1154
- <div className="absolute inset-0 flex flex-col items-center justify-center z-50 animate-fade-in bg-gradient-to-br from-slate-950 via-purple-950 to-slate-900">
1155
- <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-3xl p-8 md:p-12 max-w-md w-full mx-4 shadow-2xl">
1156
- <h2 className="text-white text-3xl md:text-4xl font-bold mb-8 text-center">
1157
- Configure Your Avatar
1158
- </h2>
1159
 
1160
- <div className="space-y-6 mb-8">
1161
- {/* Language Selection */}
1162
- <div>
1163
- <label className="text-white/90 text-lg font-semibold mb-3 block">
1164
- Language / Taal
1165
- </label>
1166
- <div className="flex gap-3">
1167
- <button
1168
- onClick={() => setSelectedLanguage("en")}
1169
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1170
- selectedLanguage === "en"
1171
- ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white shadow-lg scale-105"
1172
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1173
- }`}
1174
- >
1175
- English
1176
- </button>
1177
- <button
1178
- onClick={() => setSelectedLanguage("nl")}
1179
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1180
- selectedLanguage === "nl"
1181
- ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white shadow-lg scale-105"
1182
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1183
- }`}
1184
- >
1185
- Nederlands
1186
- </button>
1187
- </div>
1188
- </div>
1189
-
1190
- {/* Voice Selection */}
1191
- <div>
1192
- <label className="text-white/90 text-lg font-semibold mb-3 block">
1193
- Voice
1194
- </label>
1195
- <div className="flex gap-3">
1196
- <button
1197
- onClick={() => setSelectedVoice("female")}
1198
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1199
- selectedVoice === "female"
1200
- ? "bg-gradient-to-r from-pink-500 to-rose-600 text-white shadow-lg scale-105"
1201
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1202
- }`}
1203
- >
1204
- Female
1205
- </button>
1206
- <button
1207
- onClick={() => setSelectedVoice("male")}
1208
- className={`flex-1 py-4 px-6 rounded-xl font-semibold transition-all duration-300 ${
1209
- selectedVoice === "male"
1210
- ? "bg-gradient-to-r from-blue-500 to-cyan-600 text-white shadow-lg scale-105"
1211
- : "bg-white/10 text-white/70 hover:bg-white/20 border border-white/20"
1212
- }`}
1213
- >
1214
- Male
1215
- </button>
1216
- </div>
1217
- </div>
1218
- </div>
1219
-
1220
  <button
1221
  onClick={() => {
1222
- localStorage.setItem("mrrrme_has_configured", "true");
1223
  setShowConfigScreen(false);
1224
  startCapture();
1225
  }}
1226
- className="w-full group relative transition-all duration-300 hover:scale-105 active:scale-95"
1227
  >
1228
- <div className="absolute inset-0 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full blur-xl opacity-50 group-hover:opacity-75 transition-opacity duration-300" />
1229
- <div className="relative px-8 py-4 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full shadow-2xl border border-white/20 flex items-center justify-center gap-3">
1230
- <svg className="w-6 h-6 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1231
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M14.752 11.168l-3.197-2.132A1 1 0 0010 9.87v4.263a1 1 0 001.555.832l3.197-2.132a1 1 0 000-1.664z" />
1232
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
1233
- </svg>
1234
- <span className="text-xl font-semibold text-white">Start</span>
1235
- </div>
1236
  </button>
1237
  </div>
1238
  </div>
1239
  )}
1240
 
1241
  {!isActive && !showAvatarCreator && !showConfigScreen && (
1242
- <div className="absolute inset-0 flex flex-col items-center justify-center z-50 animate-fade-in">
1243
- <h1 className="text-white text-4xl md:text-6xl font-bold mb-4 tracking-tight animate-slide-down text-center px-4">
1244
- What Can I Do For You Today?
1245
  </h1>
1246
- <p className="text-white/60 text-lg md:text-xl mb-12 animate-slide-down text-center px-4" style={{ animationDelay: "0.1s" }}>
1247
- I can help with your emotions and wellness
1248
  </p>
1249
  <button
1250
  onClick={startCapture}
1251
- className="group relative transition-all duration-300 hover:scale-105 active:scale-95 animate-slide-up mb-6"
1252
- style={{ animationDelay: "0.2s" }}
1253
  >
1254
- <div className="absolute inset-0 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full blur-xl opacity-50 group-hover:opacity-75 transition-opacity duration-300" />
1255
- <div className="relative px-12 py-4 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full shadow-2xl border border-white/20 flex items-center gap-3">
1256
- <svg className="w-6 h-6 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1257
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11a7 7 0 01-7 7m0 0a7 7 0 01-7-7m7 7v4m0 0H8m4 0h4m-4-8a3 3 0 01-3-3V5a3 3 0 116 0v6a3 3 0 01-3 3z" />
1258
- </svg>
1259
- <span className="text-xl font-semibold text-white">Start Conversation</span>
1260
- </div>
1261
  </button>
1262
  <button
1263
  onClick={() => setShowAvatarCreator(true)}
1264
- className="backdrop-blur-xl bg-white/10 hover:bg-white/20 border border-white/20 rounded-2xl px-8 py-4 text-white font-semibold transition-all duration-300 hover:scale-105 active:scale-95 shadow-lg animate-slide-up"
1265
- style={{ animationDelay: "0.3s" }}
1266
  >
1267
- 🎭 Create Your Avatar
1268
  </button>
1269
  </div>
1270
  )}
1271
 
1272
- {isActive && (
1273
- <div className="absolute top-6 right-6 z-30 animate-slide-down">
1274
- <div className="flex items-center gap-3">
1275
- <MinimalButton
1276
- icon={
1277
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1278
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z" />
1279
- </svg>
1280
- }
1281
- onClick={() => setShowAvatarCreator(true)}
1282
- label="Create Avatar"
1283
- />
1284
- <MinimalButton
1285
- icon={
1286
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1287
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z" />
1288
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
1289
- </svg>
1290
- }
1291
- onClick={() => setShowSettings(true)}
1292
- label="Settings"
1293
- />
1294
- <MinimalButton
1295
- icon={
1296
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1297
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 8V4m0 0h4M4 4l5 5m11-1V4m0 0h-4m4 0l-5 5M4 16v4m0 0h4m-4 0l5-5m11 5l-5-5m5 5v-4m0 4h-4" />
1298
- </svg>
1299
- }
1300
- onClick={() => setShowTransformControls(!showTransformControls)}
1301
- label="Adjust Position"
1302
- />
1303
- <MinimalButton
1304
- icon={
1305
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1306
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z" />
1307
- </svg>
1308
- }
1309
- onClick={() => setShowHistory(!showHistory)}
1310
- label="History"
1311
- />
1312
- {!isSpeechPaused ? (
1313
- <MinimalButton
1314
- icon={
1315
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1316
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10 9v6m4-6v6m7-3a9 9 0 11-18 0 9 9 0 0118 0z" />
1317
- </svg>
1318
- }
1319
- onClick={() => {
1320
- if (recognitionRef.current) {
1321
- try {
1322
- recognitionRef.current.stop();
1323
- shouldAutoRestartRef.current = false;
1324
- setIsSpeechPaused(true);
1325
- setStatus("Paused");
1326
- } catch {}
1327
- }
1328
- }}
1329
- label="Pause"
1330
- />
1331
- ) : (
1332
- <MinimalButton
1333
- icon={
1334
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1335
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M14.752 11.168l-3.197-2.132A1 1 0 0010 9.87v4.263a1 1 0 001.555.832l3.197-2.132a1 1 0 000-1.664z" />
1336
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
1337
- </svg>
1338
- }
1339
- onClick={() => {
1340
- shouldAutoRestartRef.current = true;
1341
- if (recognitionRef.current) {
1342
- try {
1343
- recognitionRef.current.start();
1344
- setIsSpeechPaused(false);
1345
- setStatus("Listening...");
1346
- } catch {
1347
- startSpeechRecognition();
1348
- setIsSpeechPaused(false);
1349
- setStatus("Listening...");
1350
- }
1351
- } else {
1352
- startSpeechRecognition();
1353
- setIsSpeechPaused(false);
1354
- setStatus("Listening...");
1355
- }
1356
- }}
1357
- label="Resume"
1358
- />
1359
- )}
1360
- </div>
1361
- </div>
1362
- )}
1363
-
1364
  {showHistory && isActive && (
1365
  <>
1366
- <div className="absolute inset-0 bg-black/60 backdrop-blur-sm z-40 animate-fade-in" onClick={() => setShowHistory(false)} />
1367
- <div className="absolute top-0 right-0 h-full z-50 backdrop-blur-2xl bg-black/40 border-l border-white/10 shadow-2xl w-full md:w-[400px] animate-slide-left">
1368
  <div className="h-full flex flex-col p-6">
1369
  <div className="flex items-center justify-between mb-6">
1370
- <h2 className="text-white text-2xl font-bold">Conversation</h2>
1371
  <button
1372
  onClick={() => setShowHistory(false)}
1373
- className="w-10 h-10 bg-white/10 rounded-xl flex items-center justify-center hover:bg-white/20 transition-all hover:rotate-90 duration-300"
1374
  >
1375
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1376
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
1377
- </svg>
1378
  </button>
1379
  </div>
1380
- <div className="flex-1 overflow-y-auto scrollbar-thin scrollbar-thumb-white/20 scrollbar-track-transparent pr-2">
1381
- {messages.length === 0 ? (
1382
- <div className="flex flex-col items-center justify-center h-full">
1383
- <div className="w-16 h-16 bg-white/5 rounded-2xl flex items-center justify-center mb-4">
1384
- <svg className="w-8 h-8 text-white/30" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1385
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M8 12h.01M12 12h.01M16 12h.01M21 12c0 4.418-4.03 8-9 8a9.863 9.863 0 01-4.255-.949L3 20l1.395-3.72C3.512 15.042 3 13.574 3 12c0-4.418 4.03-8 9-8s9 3.582 9 8z" />
1386
- </svg>
1387
- </div>
1388
- <p className="text-white/40 text-sm text-center">No messages yet<br/>Start a conversation!</p>
1389
- </div>
1390
- ) : (
1391
- <>
1392
- {messages.map((m) => (
1393
- <MessageBubble key={m.id} message={m} />
1394
- ))}
1395
- <div ref={historyEndRef} />
1396
- </>
1397
- )}
1398
  </div>
1399
  </div>
1400
  </div>
1401
  </>
1402
  )}
1403
 
1404
- {/* Transform Controls Panel */}
1405
- {showTransformControls && (
1406
- <div className="absolute top-0 left-0 h-full z-50 backdrop-blur-2xl bg-black/40 border-r border-white/10 shadow-2xl w-full md:w-[350px] animate-slide-left overflow-y-auto">
1407
- <div className="h-full flex flex-col p-6">
1408
- <div className="flex items-center justify-between mb-6">
1409
- <h2 className="text-white text-xl font-bold">Avatar Adjustments</h2>
1410
- <button
1411
- onClick={() => setShowTransformControls(false)}
1412
- className="w-10 h-10 bg-white/10 rounded-xl flex items-center justify-center hover:bg-white/20 transition-all hover:rotate-90 duration-300"
1413
- >
1414
- <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
1415
- <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
1416
- </svg>
1417
- </button>
1418
- </div>
1419
-
1420
- <div className="space-y-4">
1421
- {/* Position */}
1422
- <div className="bg-white/5 rounded-xl p-4">
1423
- <h3 className="text-white text-sm font-bold mb-3">Position</h3>
1424
- <div className="space-y-2">
1425
- <div>
1426
- <label className="text-white/60 text-xs">X: <span className="text-green-400 font-mono">{avatarPosition.x.toFixed(2)}</span></label>
1427
- <input type="range" min="-5" max="5" step="0.01" value={avatarPosition.x} onChange={(e) => setAvatarPosition({...avatarPosition, x: parseFloat(e.target.value)})} className="w-full" />
1428
- </div>
1429
- <div>
1430
- <label className="text-white/60 text-xs">Y: <span className="text-green-400 font-mono">{avatarPosition.y.toFixed(2)}</span></label>
1431
- <input type="range" min="-5" max="5" step="0.01" value={avatarPosition.y} onChange={(e) => setAvatarPosition({...avatarPosition, y: parseFloat(e.target.value)})} className="w-full" />
1432
- </div>
1433
- <div>
1434
- <label className="text-white/60 text-xs">Z: <span className="text-green-400 font-mono">{avatarPosition.z.toFixed(2)}</span></label>
1435
- <input type="range" min="-5" max="5" step="0.01" value={avatarPosition.z} onChange={(e) => setAvatarPosition({...avatarPosition, z: parseFloat(e.target.value)})} className="w-full" />
1436
- </div>
1437
- </div>
1438
- </div>
1439
-
1440
- {/* Rotation */}
1441
- <div className="bg-white/5 rounded-xl p-4">
1442
- <h3 className="text-white text-sm font-bold mb-3">Rotation</h3>
1443
- <div className="space-y-2">
1444
- <div>
1445
- <label className="text-white/60 text-xs">X (Pitch): <span className="text-green-400 font-mono">{avatarRotation.x.toFixed(2)}</span></label>
1446
- <input type="range" min="-3.14" max="3.14" step="0.01" value={avatarRotation.x} onChange={(e) => setAvatarRotation({...avatarRotation, x: parseFloat(e.target.value)})} className="w-full" />
1447
- </div>
1448
- <div>
1449
- <label className="text-white/60 text-xs">Y (Turn): <span className="text-green-400 font-mono">{avatarRotation.y.toFixed(2)}</span></label>
1450
- <input type="range" min="-3.14" max="3.14" step="0.01" value={avatarRotation.y} onChange={(e) => setAvatarRotation({...avatarRotation, y: parseFloat(e.target.value)})} className="w-full" />
1451
- </div>
1452
- <div>
1453
- <label className="text-white/60 text-xs">Z (Roll): <span className="text-green-400 font-mono">{avatarRotation.z.toFixed(2)}</span></label>
1454
- <input type="range" min="-3.14" max="3.14" step="0.01" value={avatarRotation.z} onChange={(e) => setAvatarRotation({...avatarRotation, z: parseFloat(e.target.value)})} className="w-full" />
1455
- </div>
1456
- </div>
1457
- </div>
1458
-
1459
- {/* Scale */}
1460
- <div className="bg-white/5 rounded-xl p-4">
1461
- <h3 className="text-white text-sm font-bold mb-3">Scale</h3>
1462
- <div>
1463
- <label className="text-white/60 text-xs">Size: <span className="text-green-400 font-mono">{avatarScale.toFixed(2)}</span></label>
1464
- <input type="range" min="0.1" max="3" step="0.01" value={avatarScale} onChange={(e) => setAvatarScale(parseFloat(e.target.value))} className="w-full" />
1465
- </div>
1466
- </div>
1467
-
1468
- {/* Copy Code Button */}
1469
- <button
1470
- onClick={() => {
1471
- const code = `position={[${avatarPosition.x.toFixed(2)}, ${avatarPosition.y.toFixed(2)}, ${avatarPosition.z.toFixed(2)}]}
1472
- rotation={[${avatarRotation.x.toFixed(2)}, ${avatarRotation.y.toFixed(2)}, ${avatarRotation.z.toFixed(2)}]}
1473
- scale={${avatarScale.toFixed(2)}}`;
1474
- navigator.clipboard.writeText(code);
1475
- alert('✅ Code copied to clipboard!');
1476
- }}
1477
- className="w-full bg-green-600 hover:bg-green-700 text-white font-bold py-3 px-4 rounded-lg transition-all"
1478
- >
1479
- 📋 Copy Values
1480
- </button>
1481
- </div>
1482
- </div>
1483
- </div>
1484
- )}
1485
-
1486
  {isActive && (
1487
- <div className="absolute bottom-8 left-1/2 transform -translate-x-1/2 w-full max-w-2xl px-6 z-30 animate-slide-up">
1488
- <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-full shadow-lg overflow-hidden">
1489
- <input
1490
- type="text"
1491
- placeholder="Type or speak..."
1492
- className="w-full px-8 py-4 bg-transparent text-white placeholder-white/40 focus:outline-none text-base"
1493
- onKeyDown={(e) => {
1494
- if (e.key === "Enter" && wsRef.current && !isAvatarSpeaking) {
1495
- const text = (e.target as HTMLInputElement).value;
1496
- if (text.trim()) {
1497
- setMessages((prev) => [
1498
- ...prev,
1499
- { id: Date.now().toString(), role: "user", content: text, timestamp: new Date() },
1500
- ]);
1501
- wsRef.current.send(JSON.stringify({
1502
- type: "speech_end",
1503
- text,
1504
- voice: selectedVoice
1505
- }));
1506
- (e.target as HTMLInputElement).value = "";
1507
- }
1508
  }
1509
- }}
1510
- />
1511
- </div>
1512
  </div>
1513
  )}
1514
 
@@ -1521,21 +971,12 @@ scale={${avatarScale.toFixed(2)}}`;
1521
  position: 'absolute',
1522
  inset: 0,
1523
  zIndex: showAvatarCreator ? 0 : 1,
1524
- transition: 'opacity 0.3s ease'
1525
  }}>
1526
  <Canvas camera={{ position: [0, 0.2, 1.5], fov: 50 }}>
1527
  <Environment preset="studio" />
1528
  <directionalLight position={[5, 5, 5]} intensity={1} />
1529
  <ambientLight intensity={0.5} />
1530
- <React.Suspense
1531
- fallback={
1532
- <Html center>
1533
- <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-2xl px-6 py-3">
1534
- <span className="text-white text-sm font-medium">Loading…</span>
1535
- </div>
1536
- </Html>
1537
- }
1538
- >
1539
  {avatarUrl && <Avatar
1540
  key={avatarRenderKey}
1541
  liveBlend={liveBlend}
@@ -1548,37 +989,15 @@ scale={${avatarScale.toFixed(2)}}`;
1548
  </Canvas>
1549
  </div>
1550
 
1551
- {/* Avaturn SDK Modal */}
1552
  <AvaturnModal
1553
  open={showAvatarCreator}
1554
  onClose={() => setShowAvatarCreator(false)}
1555
  onExport={handleAvaturnExport}
1556
- subdomain="mrrrme"
1557
  />
1558
 
1559
  <style jsx global>{`
1560
- @keyframes fade-in { from { opacity: 0; } to { opacity: 1; } }
1561
  @keyframes slide-up { from { opacity: 0; transform: translateY(30px);} to { opacity: 1; transform: translateY(0);} }
1562
- @keyframes slide-down { from { opacity: 0; transform: translateY(-30px);} to { opacity: 1; } }
1563
- @keyframes slide-left { from { opacity: 0; transform: translateX(30px);} to { opacity: 1; transform: translateX(0);} }
1564
- @keyframes spin-slow { from { transform: rotate(0deg);} to { transform: rotate(360deg);} }
1565
- @keyframes spin-very-slow { from { transform: rotate(0deg);} to { transform: rotate(360deg);} }
1566
- @keyframes pulse-slow { 0%,100%{opacity:.5;transform:scale(1)} 50%{opacity:.8;transform:scale(1.05)} }
1567
- @keyframes pulse-slower { 0%,100%{opacity:.4;transform:scale(1)} 50%{opacity:.7;transform:scale(1.1)} }
1568
- @keyframes breathe { 0%,100%{transform:scale(1);opacity:.5} 50%{transform:scale(1.1);opacity:.8} }
1569
- .animate-fade-in{animation:fade-in .5s ease-out}
1570
  .animate-slide-up{animation:slide-up .5s ease-out}
1571
- .animate-slide-down{animation:slide-down .5s ease-out}
1572
- .animate-slide-left{animation:slide-left .3s ease-out}
1573
- .animate-spin-slow{animation:spin-slow 20s linear infinite}
1574
- .animate-spin-very-slow{animation:spin-very-slow 40s linear infinite}
1575
- .animate-pulse-slow{animation:pulse-slow 3s ease-in-out infinite}
1576
- .animate-pulse-slower{animation:pulse-slower 4s ease-in-out infinite}
1577
- .animate-breathe{animation:breathe 3s ease-in-out infinite}
1578
- .scrollbar-thin::-webkit-scrollbar{width:6px}
1579
- .scrollbar-thin::-webkit-scrollbar-track{background:transparent}
1580
- .scrollbar-thin::-webkit-scrollbar-thumb{background:rgba(255,255,255,.2);border-radius:3px}
1581
- .scrollbar-thin::-webkit-scrollbar-thumb:hover{background:rgba(255,255,255,.3)}
1582
  `}</style>
1583
  </div>
1584
  );
 
7
  import * as THREE from "three";
8
 
9
  /* -------------------- CONSTANTS & TYPES -------------------- */
10
+ const DEFAULT_AVATAR = "";
11
 
12
  const getWebSocketURL = () => {
13
  if (typeof window === "undefined") return "ws://localhost:8000/ws";
14
+ const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
15
  return `${protocol}//${window.location.host}/ws`;
16
  };
17
  const BACKEND_WS = getWebSocketURL();
 
28
 
29
  /* -------------------- DYNAMIC CDN IMPORT -------------------- */
30
  async function importFromCdn(url: string) {
 
 
31
  return (0, eval)(`import(${JSON.stringify(url)})`);
32
  }
33
 
 
54
  return URL.createObjectURL(blob);
55
  }
56
 
57
+ /* -------------------- AVATURN MODAL -------------------- */
58
  function AvaturnModal({
59
  open,
60
  onClose,
 
67
  subdomain?: string;
68
  }) {
69
  const containerRef = React.useRef<HTMLDivElement | null>(null);
 
70
  const [sdkStatus, setSdkStatus] = useState<string>("Loading...");
71
  const sdkRef = useRef<any>(null);
72
  const onExportRef = useRef(onExport);
 
80
  let cancelled = false;
81
 
82
  (async () => {
83
+ try {
 
84
  setSdkStatus("Loading SDK...");
85
 
86
  const mod: any = await importFromCdn(
 
88
  );
89
 
90
  if (cancelled) return;
 
91
 
92
  const AvaturnSDK = mod?.AvaturnSDK;
93
  if (!AvaturnSDK) {
94
+ throw new Error("AvaturnSDK not found");
95
  }
96
 
 
97
  const sdk = new AvaturnSDK();
98
  sdkRef.current = sdk;
99
 
100
+ setSdkStatus("Initializing...");
101
  const url = `https://${subdomain}.avaturn.dev`;
 
102
 
103
  await sdk.init(containerRef.current!, { url });
104
+ setSdkStatus("Ready!");
 
 
105
 
106
  sdk.on("export", (data: any) => {
 
 
 
107
  const glb =
108
  data?.links?.glb?.url ||
109
  data?.links?.glb ||
 
114
  data?.data?.glb?.url ||
115
  data?.data?.glb;
116
 
 
 
117
  if (glb) {
118
  onExportRef.current(glb);
 
 
 
119
  }
120
  });
121
 
 
 
 
 
122
  } catch (e: any) {
123
+ console.error("[Avaturn] Error:", e);
124
+ setSdkStatus("Error loading");
 
125
  }
126
  })();
127
 
 
129
  cancelled = true;
130
  if (sdkRef.current) {
131
  try {
 
132
  sdkRef.current.destroy?.();
133
+ } catch {}
 
 
 
134
  }
135
  };
136
  }, [open, subdomain]);
 
140
  return (
141
  <div style={{ position: "fixed", inset: 0, zIndex: 1000, background: "#000" }}>
142
  <button
143
+ onClick={onClose}
 
 
 
144
  style={{
145
  position: "absolute",
146
  top: 20,
 
154
  borderRadius: 10,
155
  cursor: "pointer",
156
  fontSize: 24,
 
 
 
 
157
  }}
158
  >
159
  βœ•
160
  </button>
161
+ <div style={{
162
+ position: "absolute",
163
+ top: 20,
164
+ left: 20,
165
+ zIndex: 1002,
166
+ background: "rgba(0,200,255,.95)",
167
+ color: "#000",
168
+ padding: "12px 18px",
169
+ borderRadius: 10,
170
+ }}>
 
 
 
 
171
  {sdkStatus}
172
  </div>
173
  <div
174
  id="avaturn-sdk-container"
175
  ref={containerRef}
176
  style={{
177
  width: "100%",
178
  height: "100%",
 
179
  background: "#1a1a1a",
180
  }}
181
  />
 
191
  rotation: [number, number, number];
192
  scale: number;
193
  }) {
 
 
 
194
  const gltf = useGLTF(avatarUrl) as any;
195
  const { scene, animations } = gltf;
 
 
196
  const idleAnimGLTF = useGLTF('/idle-animation.glb') as any;
 
 
 
 
 
 
197
  const mixerRef = useRef<THREE.AnimationMixer | null>(null);
198
 
199
  const morphMeshes = useMemo(() => {
 
210
  if (o.morphTargetDictionary && o.morphTargetInfluences) arr.push(o);
211
  });
212
 
 
213
  const mixer = new THREE.AnimationMixer(scene);
214
  mixerRef.current = mixer;
215
 
 
216
  const animToPlay = (animations && animations.length > 0)
217
  ? animations[0]
218
  : (idleAnimGLTF.animations && idleAnimGLTF.animations.length > 0)
 
222
  if (animToPlay) {
223
  const action = mixer.clipAction(animToPlay);
224
  action.play();
 
 
 
225
  }
226
 
227
  return arr;
228
  }, [scene, animations, idleAnimGLTF.animations]);
229
 
230
  useFrame((_, dt) => {
 
231
  if (mixerRef.current) {
232
  mixerRef.current.update(dt);
233
  }
234
 
 
235
  morphMeshes.forEach((m) => {
236
  const dict = m.morphTargetDictionary as Record<string, number>;
237
  const infl = m.morphTargetInfluences as number[];
 
239
  const i = dict[name];
240
  if (i === undefined) return;
241
  const cur = infl[i] ?? 0;
 
242
  infl[i] = cur + (target - cur) * Math.min(1, dt * 25);
243
  });
244
  Object.values(dict).forEach((idx) => {
 
252
  return <primitive object={scene} position={position} rotation={rotation} scale={scale} />;
253
  }
254
 
255
+ /* -------------------- LOGIN SCREEN -------------------- */
256
+ function LoginScreen({ onLogin }: { onLogin: (token: string, username: string, summary: string | null) => void }) {
257
+ const [isLogin, setIsLogin] = useState(true);
258
+ const [username, setUsername] = useState("");
259
+ const [password, setPassword] = useState("");
260
+ const [error, setError] = useState("");
261
+ const [loading, setLoading] = useState(false);
262
+
263
+ const handleSubmit = async () => {
264
+ if (!username || !password) {
265
+ setError("Please fill in all fields");
266
+ return;
267
+ }
268
+
269
+ setLoading(true);
270
+ setError("");
271
+
272
+ const endpoint = isLogin ? "/api/login" : "/api/signup";
273
+ const protocol = window.location.protocol;
274
+ const host = window.location.host;
275
+ const url = `${protocol}//${host}${endpoint}`;
276
+
277
+ try {
278
+ const response = await fetch(url, {
279
+ method: "POST",
280
+ headers: { "Content-Type": "application/json" },
281
+ body: JSON.stringify({ username, password }),
282
+ });
283
+
284
+ const data = await response.json();
285
+
286
+ if (response.ok) {
287
+ if (isLogin) {
288
+ localStorage.setItem("mrrrme_token", data.token);
289
+ localStorage.setItem("mrrrme_username", data.username);
290
+ onLogin(data.token, data.username, data.summary);
291
+ } else {
292
+ setIsLogin(true);
293
+ setError("Account created! Please login.");
294
+ }
295
+ } else {
296
+ setError(data.detail || "Authentication failed");
297
+ }
298
+ } catch (err) {
299
+ setError("Connection error");
300
+ } finally {
301
+ setLoading(false);
302
+ }
303
+ };
304
+
305
  return (
306
+ <div className="absolute inset-0 flex items-center justify-center z-50 bg-gradient-to-br from-slate-950 via-purple-950 to-slate-900">
307
+ <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-3xl p-8 md:p-12 max-w-md w-full mx-4 shadow-2xl">
308
+ <h1 className="text-white text-4xl md:text-5xl font-bold mb-3 text-center">
309
+ MrrrMe
310
+ </h1>
311
+ <p className="text-white/60 text-center mb-8">
312
+ Your Emotion AI Companion
313
+ </p>
314
+
315
+ <div className="space-y-4 mb-6">
316
+ <input
317
+ type="text"
318
+ placeholder="Username"
319
+ value={username}
320
+ onChange={(e) => setUsername(e.target.value)}
321
+ onKeyDown={(e) => e.key === "Enter" && handleSubmit()}
322
+ className="w-full px-4 py-3 rounded-xl bg-white/10 border border-white/20 text-white placeholder-white/40 focus:outline-none focus:border-blue-400 transition-all"
323
+ />
324
+ <input
325
+ type="password"
326
+ placeholder="Password"
327
+ value={password}
328
+ onChange={(e) => setPassword(e.target.value)}
329
+ onKeyDown={(e) => e.key === "Enter" && handleSubmit()}
330
+ className="w-full px-4 py-3 rounded-xl bg-white/10 border border-white/20 text-white placeholder-white/40 focus:outline-none focus:border-blue-400 transition-all"
331
+ />
332
+ </div>
333
+
334
+ {error && (
335
+ <div className="mb-4 p-3 rounded-lg bg-red-500/20 border border-red-500/50 text-red-200 text-sm">
336
+ {error}
337
+ </div>
338
+ )}
339
+
340
+ <button
341
+ onClick={handleSubmit}
342
+ disabled={loading}
343
+ className="w-full mb-4 py-3 px-6 rounded-full bg-gradient-to-r from-blue-500 to-purple-600 text-white font-semibold hover:scale-105 transition-all disabled:opacity-50 disabled:scale-100"
344
+ >
345
+ {loading ? "Please wait..." : isLogin ? "Login" : "Sign Up"}
346
+ </button>
347
+
348
+ <button
349
+ onClick={() => {
350
+ setIsLogin(!isLogin);
351
+ setError("");
352
+ }}
353
+ className="w-full text-white/60 hover:text-white transition-all text-sm"
354
+ >
355
+ {isLogin ? "Need an account? Sign up" : "Have an account? Login"}
356
+ </button>
357
  </div>
358
  </div>
359
  );
360
  }
361
+
362
+ /* -------------------- MESSAGE BUBBLE -------------------- */
363
  function MessageBubble({ message }: { message: Message }) {
364
  const isUser = message.role === "user";
365
  return (
 
382
  </div>
383
  );
384
  }
385
 
386
  /* -------------------- PAGE -------------------- */
387
  export default function Page() {
388
+ const [isAuthenticated, setIsAuthenticated] = useState(false);
389
+ const [username, setUsername] = useState("");
390
+ const [userToken, setUserToken] = useState("");
391
+ const [userSummary, setUserSummary] = useState<string | null>(null);
392
+
393
+ const [status, setStatus] = useState("Loading...");
394
  const [faceEmotion, setFaceEmotion] = useState("Neutral");
395
  const [voiceEmotion, setVoiceEmotion] = useState("Neutral");
396
  const [isActive, setIsActive] = useState(false);
 
401
  const [showSettings, setShowSettings] = useState(false);
402
  const [selectedLanguage, setSelectedLanguage] = useState<"en" | "nl">("en");
403
  const [selectedVoice, setSelectedVoice] = useState<"male" | "female">("female");
 
404
  const [messages, setMessages] = useState<Message[]>([]);
405
  const [volume, setVolume] = useState(0.8);
406
 
 
 
407
  const [avatarPosition, setAvatarPosition] = useState({ x: -0.01, y: -2.12, z: 0.06 });
408
  const [avatarRotation, setAvatarRotation] = useState({ x: 0.00, y: 0.51, z: 0.00 });
409
  const [avatarScale, setAvatarScale] = useState(1.25);
 
412
  const [avatarRenderKey, setAvatarRenderKey] = useState(0);
413
  const objectUrlRef = useRef<string | null>(null);
414
  const avatarUrlRef = useRef(DEFAULT_AVATAR);
 
 
 
 
 
 
 
415
 
416
  const videoRef = useRef<HTMLVideoElement>(null);
417
  const audioRef = useRef<HTMLAudioElement>(null);
 
426
  const idxRef = useRef(0);
427
  const isPlayingRef = useRef(false);
428
 
429
+ // Check for existing session
430
  useEffect(() => {
431
+ const token = localStorage.getItem("mrrrme_token");
432
+ const savedUsername = localStorage.getItem("mrrrme_username");
433
+
434
+ if (token && savedUsername) {
435
+ // Auto-login
436
+ setUserToken(token);
437
+ setUsername(savedUsername);
438
+ setIsAuthenticated(true);
439
+ }
440
  }, []);
441
 
442
+ const handleLogin = (token: string, user: string, summary: string | null) => {
443
+ setUserToken(token);
444
+ setUsername(user);
445
+ setUserSummary(summary);
446
+ setIsAuthenticated(true);
447
+
448
+ if (summary) {
449
+ setStatus(`Welcome back, ${user}!`);
450
+ } else {
451
+ setStatus(`Welcome, ${user}!`);
452
+ }
453
+ };
454
 
455
+ const handleLogout = async () => {
456
+ // Stop everything
457
+ if (wsRef.current) wsRef.current.close();
458
+ if (mediaRecorderRef.current) mediaRecorderRef.current.stop();
459
+ if (recognitionRef.current) {
460
+ try {
461
+ recognitionRef.current.stop();
462
+ } catch {}
463
+ }
464
+ if (videoRef.current?.srcObject) {
465
+ const stream = videoRef.current.srcObject as MediaStream;
466
+ stream?.getTracks().forEach((t) => t.stop());
467
+ }
468
 
469
+ // Logout API call
 
470
  try {
471
+ const protocol = window.location.protocol;
472
+ const host = window.location.host;
473
+ await fetch(`${protocol}//${host}/api/logout`, {
474
+ method: "POST",
475
+ headers: { "Content-Type": "application/json" },
476
+ body: JSON.stringify({ token: userToken }),
477
+ });
478
+ } catch {}
479
+
480
+ // Clear local state
481
+ localStorage.removeItem("mrrrme_token");
482
+ localStorage.removeItem("mrrrme_username");
483
+ setIsAuthenticated(false);
484
+ setUsername("");
485
+ setUserToken("");
486
+ setUserSummary(null);
487
+ setIsActive(false);
488
+ setMessages([]);
489
+ };
490
 
491
  function connectWebSocket() {
492
  const ws = new WebSocket(BACKEND_WS);
493
  ws.onopen = () => {
494
+ // Authenticate
495
+ ws.send(JSON.stringify({ type: "auth", token: userToken }));
496
  wsRef.current = ws;
 
 
 
 
 
 
497
  };
498
+
499
  ws.onmessage = async (event) => {
500
  const data = JSON.parse(event.data);
501
+
502
+ if (data.type === "authenticated") {
503
+ setStatus("Connected");
504
+ // Send preferences
505
+ ws.send(JSON.stringify({
506
+ type: "preferences",
507
+ voice: selectedVoice,
508
+ language: selectedLanguage
509
+ }));
510
+ } else if (data.type === "face_emotion") {
511
+ setFaceEmotion(data.emotion);
512
+ } else if (data.type === "voice_emotion") {
513
+ setVoiceEmotion(data.emotion);
514
+ } else if (data.type === "llm_response") {
515
  setMessages((prev) => [
516
  ...prev,
517
  { id: Date.now().toString(), role: "assistant", content: data.text, timestamp: new Date(), emotion: data.emotion },
 
531
  setIsAvatarSpeaking(false);
532
  shouldAutoRestartRef.current = true;
533
  }
534
+ } else if (data.type === "error") {
535
+ setStatus(data.message);
536
  }
537
  };
538
+
539
  ws.onerror = () => setStatus("Connection error");
540
  ws.onclose = () => {
541
  setStatus("Disconnected");
 
543
  };
544
  }
545
 
546
+ async function startCapture() {
547
+ if (isActive) return;
548
+ try {
549
+ const stream = await navigator.mediaDevices.getUserMedia({
550
+ video: { width: 640, height: 480 },
551
+ audio: true
552
+ });
553
+ if (videoRef.current) {
554
+ videoRef.current.srcObject = stream;
555
+ await videoRef.current.play();
556
+ }
557
+ connectWebSocket();
558
+ startVideoCapture();
559
+ startAudioCapture(stream);
560
+ startSpeechRecognition();
561
+ setIsActive(true);
562
+ setStatus("Listening...");
563
+ } catch (err) {
564
+ console.error("[Error]", err);
565
+ setStatus("Camera/microphone denied");
566
+ }
567
+ }
568
+
569
  function startVideoCapture() {
570
  const canvas = document.createElement("canvas");
571
  const ctx = canvas.getContext("2d");
 
605
  setStatus("Speech not supported");
606
  return;
607
  }
 
 
 
 
 
 
 
 
 
 
608
 
609
  const recognition = new SpeechRecognition();
610
  recognition.continuous = true;
611
  recognition.interimResults = true;
612
  recognition.lang = selectedLanguage === "nl" ? "nl-NL" : "en-US";
 
 
 
613
 
614
  let finalTranscript = "";
615
 
616
  recognition.onresult = (event: any) => {
617
  if (isAvatarSpeaking) return;
618
 
619
+ let interim = "";
 
 
 
 
 
 
 
 
620
  for (let i = event.resultIndex; i < event.results.length; i++) {
621
+ const transcript = event.results[i][0].transcript;
622
+ if (event.results[i].isFinal) {
623
+ finalTranscript += transcript + " ";
 
 
 
 
 
 
 
624
  } else {
625
+ interim += transcript;
 
626
  }
627
  }
628
 
629
+ if (finalTranscript.trim()) {
630
+ const text = finalTranscript.trim();
631
+ setMessages((prev) => [
632
+ ...prev,
633
+ { id: Date.now().toString(), role: "user", content: text, timestamp: new Date() },
634
+ ]);
635
+ wsRef.current?.send(JSON.stringify({ type: "speech_end", text }));
636
+ finalTranscript = "";
637
  }
638
  };
639
 
640
  recognition.onerror = (event: any) => {
641
+ if (event.error !== "no-speech") {
642
  setStatus(`Speech error: ${event.error}`);
643
  }
644
  };
645
 
646
  recognition.onend = () => {
647
+ if (shouldAutoRestartRef.current && !isAvatarSpeaking) {
648
+ try {
649
+ recognition.start();
650
+ } catch {}
651
  }
652
  };
653
 
654
+ recognition.start();
655
+ recognitionRef.current = recognition;
 
 
 
 
 
656
  }
657
 
658
  async function playAvatarResponse(data: any) {
659
+ if (!audioRef.current) return;
660
+
 
 
 
661
  audioRef.current.pause();
 
 
662
  isPlayingRef.current = false;
 
663
  setLiveBlend({});
664
  visemesRef.current = (data.visemes as Viseme[]).sort((a, b) => a.t - b.t);
665
 
666
  const protocol = window.location.protocol;
667
  const host = window.location.host;
668
+ const url = data.audio_url.startsWith("http")
669
+ ? data.audio_url
670
+ : `${protocol}//${host}${data.audio_url}`;
671
  audioRef.current.src = url;
672
 
673
  try {
674
+ await new Promise((resolve, reject) => {
675
+ if (!audioRef.current) return reject();
676
+ audioRef.current.oncanplaythrough = () => resolve(true);
677
+ audioRef.current.onerror = () => reject();
678
+ audioRef.current.load();
679
+ });
 
 
 
680
 
681
  isPlayingRef.current = true;
682
+ await audioRef.current.play();
683
 
684
  await new Promise((resolve) => {
685
  if (!audioRef.current) return resolve(true);
686
+ audioRef.current.onended = () => resolve(true);
 
 
 
 
 
 
687
  });
688
  } catch {
689
  setStatus("Audio error");
 
 
 
 
 
 
 
690
  }
691
 
692
  setIsAvatarSpeaking(false);
693
  setStatus("Listening...");
 
694
  isPlayingRef.current = false;
695
  setLiveBlend({});
696
+ shouldAutoRestartRef.current = true;
697
+
698
  if (recognitionRef.current) {
699
  try { recognitionRef.current.start(); } catch {}
700
  }
 
707
  if (a && visemesRef.current.length > 0 && isPlayingRef.current) {
708
  const t = a.currentTime;
709
  while (idxRef.current < visemesRef.current.length && visemesRef.current[idxRef.current].t <= t + 0.02) {
710
+ setLiveBlend(visemesRef.current[idxRef.current].blend);
 
711
  idxRef.current++;
712
  }
 
 
 
 
 
 
713
  }
714
  raf = requestAnimationFrame(tick);
715
  };
 
717
  return () => cancelAnimationFrame(raf);
718
  }, []);
719
720
  const handleAvaturnExport = useCallback(async (remoteUrl: string) => {
 
 
721
  const oldUrl = avatarUrlRef.current;
722
  try {
723
  if (oldUrl !== DEFAULT_AVATAR) {
 
725
  }
726
  } catch {}
727
 
728
+ if (objectUrlRef.current) {
729
  URL.revokeObjectURL(objectUrlRef.current);
730
  objectUrlRef.current = null;
731
  }
732
 
733
+ setStatus("Loading avatar...");
734
+ setShowAvatarCreator(false);
735
+
736
+ await new Promise(resolve => setTimeout(resolve, 300));
737
+
738
+ flushSync(() => {
739
  setAvatarUrl(remoteUrl);
740
+ setAvatarRenderKey(prev => prev + 1);
741
+ });
742
+
743
+ setStatus("Avatar updated!");
744
+
745
+ setTimeout(() => {
746
+ setStatus("What can I do for you today?");
747
+ }, 2000);
 
 
 
 
 
 
 
748
  }, []);
749
 
750
+ if (!isAuthenticated) {
751
+ return <LoginScreen onLogin={handleLogin} />;
752
+ }
753
+
754
  return (
755
  <div className="relative w-screen h-screen bg-gradient-to-br from-slate-950 via-purple-950 to-slate-900 overflow-hidden">
756
  <div className="absolute top-6 left-6 z-30">
757
+ <div className="text-white font-bold">MrrrMe - {username}</div>
758
  <div className="text-white/60 text-sm">{status}</div>
759
  </div>
760
 
761
+ <div className="absolute top-6 right-6 z-30 flex gap-3">
762
+ {isActive && (
763
+ <>
764
+ <button
765
+ onClick={() => setShowAvatarCreator(true)}
766
+ className="w-12 h-12 rounded-full backdrop-blur-xl bg-white/10 border border-white/20 flex items-center justify-center hover:bg-white/20"
767
+ >
768
+ <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
769
+ <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z" />
770
+ </svg>
771
+ </button>
772
+ <button
773
+ onClick={() => setShowSettings(true)}
774
+ className="w-12 h-12 rounded-full backdrop-blur-xl bg-white/10 border border-white/20 flex items-center justify-center hover:bg-white/20"
775
+ >
776
+ <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
777
+ <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z" />
778
+ <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
779
+ </svg>
780
+ </button>
781
+ <button
782
+ onClick={() => setShowHistory(!showHistory)}
783
+ className="w-12 h-12 rounded-full backdrop-blur-xl bg-white/10 border border-white/20 flex items-center justify-center hover:bg-white/20"
784
+ >
785
+ <svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor">
786
+ <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z" />
787
+ </svg>
788
+ </button>
789
+ <button
790
+ onClick={handleLogout}
791
+ className="w-12 h-12 rounded-full backdrop-blur-xl bg-red-500/20 border border-red-500/50 flex items-center justify-center hover:bg-red-500/30"
792
+ >
793
+ <svg className="w-5 h-5 text-red-200" fill="none" viewBox="0 0 24 24" stroke="currentColor">
794
+ <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M17 16l4-4m0 0l-4-4m4 4H7m6 4v1a3 3 0 01-3 3H6a3 3 0 01-3-3V7a3 3 0 013-3h4a3 3 0 013 3v1" />
795
+ </svg>
796
+ </button>
797
+ </>
798
+ )}
799
+ </div>
800
 
801
+ {/* Settings Modal */}
802
  {showSettings && (
803
  <>
804
+ <div className="absolute inset-0 bg-black/60 backdrop-blur-sm z-40" onClick={() => setShowSettings(false)} />
805
+ <div className="absolute inset-0 flex items-center justify-center z-50">
806
+ <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-3xl p-8 max-w-md w-full mx-4">
807
+ <h2 className="text-white text-3xl font-bold mb-6 text-center">Settings</h2>
 
 
 
 
 
 
 
808
 
809
+ <div className="space-y-4 mb-6">
 
 
 
 
 
810
  <div>
811
+ <label className="text-white/90 font-semibold mb-2 block">Language</label>
 
 
812
  <div className="flex gap-3">
813
  <button
814
+ onClick={() => setSelectedLanguage("en")}
815
+ className={`flex-1 py-3 rounded-xl font-semibold ${
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
816
  selectedLanguage === "en"
817
+ ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white"
818
+ : "bg-white/10 text-white/70 hover:bg-white/20"
819
  }`}
820
  >
821
  English
822
  </button>
823
  <button
824
+ onClick={() => setSelectedLanguage("nl")}
825
+ className={`flex-1 py-3 rounded-xl font-semibold ${
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
826
  selectedLanguage === "nl"
827
+ ? "bg-gradient-to-r from-blue-500 to-purple-600 text-white"
828
+ : "bg-white/10 text-white/70 hover:bg-white/20"
829
  }`}
830
  >
831
  Nederlands
 
833
  </div>
834
  </div>
835
 
 
836
  <div>
837
+ <label className="text-white/90 font-semibold mb-2 block">Voice</label>
 
 
838
  <div className="flex gap-3">
839
  <button
840
+ onClick={() => setSelectedVoice("female")}
841
+ className={`flex-1 py-3 rounded-xl font-semibold ${
 
 
 
 
 
 
 
 
 
842
  selectedVoice === "female"
843
+ ? "bg-gradient-to-r from-pink-500 to-rose-600 text-white"
844
+ : "bg-white/10 text-white/70 hover:bg-white/20"
845
  }`}
846
  >
847
  Female
848
  </button>
849
  <button
850
+ onClick={() => setSelectedVoice("male")}
851
+ className={`flex-1 py-3 rounded-xl font-semibold ${
 
 
 
 
 
 
 
 
 
852
  selectedVoice === "male"
853
+ ? "bg-gradient-to-r from-blue-500 to-cyan-600 text-white"
854
+ : "bg-white/10 text-white/70 hover:bg-white/20"
855
  }`}
856
  >
857
  Male
 
862
 
863
  <button
864
  onClick={() => setShowSettings(false)}
865
+ className="w-full py-3 rounded-full bg-gradient-to-r from-blue-500 to-purple-600 text-white font-semibold"
866
  >
867
+ Done
 
 
 
868
  </button>
869
  </div>
870
  </div>
871
  </>
872
  )}
873
 
874
+ {/* Config Screen */}
875
+ {showConfigScreen && !isActive && (
876
+ <div className="absolute inset-0 flex items-center justify-center z-50 bg-gradient-to-br from-slate-950 via-purple-950 to-slate-900">
877
+ <div className="backdrop-blur-xl bg-white/10 border border-white/20 rounded-3xl p-8 max-w-md w-full mx-4">
878
+ <h2 className="text-white text-4xl font-bold mb-6 text-center">Configure Avatar</h2>
 
879
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
880
  <button
881
  onClick={() => {
 
882
  setShowConfigScreen(false);
883
  startCapture();
884
  }}
885
+ className="w-full py-3 rounded-full bg-gradient-to-r from-blue-500 to-purple-600 text-white font-semibold"
886
  >
887
+ Start
 
 
 
 
 
 
 
888
  </button>
889
  </div>
890
  </div>
891
  )}
892
 
893
  {!isActive && !showAvatarCreator && !showConfigScreen && (
894
+ <div className="absolute inset-0 flex flex-col items-center justify-center z-50">
895
+ <h1 className="text-white text-4xl md:text-6xl font-bold mb-4 text-center px-4">
896
+ Welcome back, {username}!
897
  </h1>
898
+ <p className="text-white/60 text-lg mb-8 text-center px-4">
899
+ What can I do for you today?
900
  </p>
901
  <button
902
  onClick={startCapture}
903
+ className="px-12 py-4 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full text-white font-semibold hover:scale-105 transition-all mb-4"
 
904
  >
905
+ Start Conversation
 
 
 
 
 
 
906
  </button>
907
  <button
908
  onClick={() => setShowAvatarCreator(true)}
909
+ className="px-8 py-3 bg-white/10 rounded-2xl text-white font-semibold hover:bg-white/20"
 
910
  >
911
+ 🎭 Create Avatar
912
  </button>
913
  </div>
914
  )}
915
 
916
+ {/* History Panel */}
917
  {showHistory && isActive && (
918
  <>
919
+ <div className="absolute inset-0 bg-black/60 z-40" onClick={() => setShowHistory(false)} />
920
+ <div className="absolute top-0 right-0 h-full z-50 backdrop-blur-2xl bg-black/40 border-l border-white/10 w-full md:w-[400px]">
921
  <div className="h-full flex flex-col p-6">
922
  <div className="flex items-center justify-between mb-6">
923
+ <h2 className="text-white text-2xl font-bold">History</h2>
924
  <button
925
  onClick={() => setShowHistory(false)}
926
+ className="w-10 h-10 bg-white/10 rounded-xl flex items-center justify-center hover:bg-white/20"
927
  >
928
+ βœ•
 
 
929
  </button>
930
  </div>
931
+ <div className="flex-1 overflow-y-auto">
932
+ {messages.map((m) => (
933
+ <MessageBubble key={m.id} message={m} />
934
+ ))}
935
+ <div ref={historyEndRef} />
936
  </div>
937
  </div>
938
  </div>
939
  </>
940
  )}
941
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
942
  {isActive && (
943
+ <div className="absolute bottom-8 left-1/2 transform -translate-x-1/2 w-full max-w-2xl px-6 z-30">
944
+ <input
945
+ type="text"
946
+ placeholder="Type or speak..."
947
+ className="w-full px-8 py-4 bg-white/10 backdrop-blur-xl border border-white/20 rounded-full text-white placeholder-white/40 focus:outline-none"
948
+ onKeyDown={(e) => {
949
+ if (e.key === "Enter" && wsRef.current) {
950
+ const text = (e.target as HTMLInputElement).value;
951
+ if (text.trim()) {
952
+ setMessages((prev) => [
953
+ ...prev,
954
+ { id: Date.now().toString(), role: "user", content: text, timestamp: new Date() },
955
+ ]);
956
+ wsRef.current.send(JSON.stringify({ type: "speech_end", text }));
957
+ (e.target as HTMLInputElement).value = "";
 
 
 
 
 
 
958
  }
959
+ }
960
+ }}
961
+ />
962
  </div>
963
  )}
964
 
 
971
  position: 'absolute',
972
  inset: 0,
973
  zIndex: showAvatarCreator ? 0 : 1,
 
974
  }}>
975
  <Canvas camera={{ position: [0, 0.2, 1.5], fov: 50 }}>
976
  <Environment preset="studio" />
977
  <directionalLight position={[5, 5, 5]} intensity={1} />
978
  <ambientLight intensity={0.5} />
979
+ <React.Suspense fallback={<Html center><div className="text-white">Loading...</div></Html>}>
 
 
 
 
 
 
 
 
980
  {avatarUrl && <Avatar
981
  key={avatarRenderKey}
982
  liveBlend={liveBlend}
 
989
  </Canvas>
990
  </div>
991
 
 
992
  <AvaturnModal
993
  open={showAvatarCreator}
994
  onClose={() => setShowAvatarCreator(false)}
995
  onExport={handleAvaturnExport}
 
996
  />
997
 
998
  <style jsx global>{`
 
999
  @keyframes slide-up { from { opacity: 0; transform: translateY(30px);} to { opacity: 1; transform: translateY(0);} }
 
 
 
 
 
 
 
 
1000
  .animate-slide-up{animation:slide-up .5s ease-out}
1001
  `}</style>
1002
  </div>
1003
  );
mrrrme/backend_server.py CHANGED
@@ -1,41 +1,13 @@
1
- """MrrrMe Backend WebSocket Server - Web-Accessible Emotion AI"""
2
  import os
3
  import sys
4
 
5
- # ===== SET CACHE DIRECTORIES FIRST =====
6
  os.environ['HF_HOME'] = '/tmp/huggingface'
7
  os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers'
8
- os.environ['HF_HUB_CACHE'] = '/tmp/huggingface/hub'
9
- os.environ['TORCH_HOME'] = '/tmp/torch'
10
  os.makedirs('/tmp/huggingface', exist_ok=True)
11
  os.makedirs('/tmp/transformers', exist_ok=True)
12
- os.makedirs('/tmp/huggingface/hub', exist_ok=True)
13
- os.makedirs('/tmp/torch', exist_ok=True)
14
 
15
- # ===== GPU FIX: Patch TensorBoard =====
16
- class DummySummaryWriter:
17
- def __init__(self, *args, **kwargs): pass
18
- def __getattr__(self, name): return lambda *args, **kwargs: None
19
-
20
- try:
21
- import tensorboardX
22
- tensorboardX.SummaryWriter = DummySummaryWriter
23
- except: pass
24
-
25
- # ===== GPU FIX: Patch Logging to redirect /work paths =====
26
- import logging
27
- _original_FileHandler = logging.FileHandler
28
-
29
- class RedirectingFileHandler(_original_FileHandler):
30
- def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
31
- if isinstance(filename, str) and filename.startswith('/work'):
32
- filename = '/tmp/openface_log.txt'
33
- os.makedirs(os.path.dirname(filename) if os.path.dirname(filename) else '/tmp', exist_ok=True)
34
- super().__init__(filename, mode, encoding, delay, errors)
35
-
36
- logging.FileHandler = RedirectingFileHandler
37
-
38
- # Now import everything else
39
  import asyncio
40
  import json
41
  import base64
@@ -43,20 +15,22 @@ import numpy as np
43
  import cv2
44
  import io
45
  import torch
46
- from fastapi import FastAPI, WebSocket, WebSocketDisconnect
47
  from fastapi.middleware.cors import CORSMiddleware
 
48
  import requests
49
  from PIL import Image
 
 
50
 
51
- # Check GPU
52
- if not torch.cuda.is_available():
53
- print("[Backend] ⚠️ No GPU detected - using CPU mode")
54
- else:
55
- print(f"[Backend] βœ… GPU available: {torch.cuda.get_device_name(0)}")
56
 
57
  app = FastAPI()
58
 
59
- # CORS for browser access
60
  app.add_middleware(
61
  CORSMiddleware,
62
  allow_origins=["*"],
@@ -65,7 +39,7 @@ app.add_middleware(
65
  allow_headers=["*"],
66
  )
67
 
68
- # Global model variables (will be loaded after startup)
69
  face_processor = None
70
  text_analyzer = None
71
  whisper_worker = None
@@ -74,23 +48,160 @@ llm_generator = None
74
  fusion_engine = None
75
  models_ready = False
76
 
77
- # Avatar backend URL
78
  AVATAR_API = "http://localhost:8765"
79
 
80
  @app.on_event("startup")
81
  async def startup_event():
82
- """Start loading models in background after server is ready"""
83
  asyncio.create_task(load_models())
84
 
85
  async def load_models():
86
- """Load all AI models asynchronously"""
87
  global face_processor, text_analyzer, whisper_worker, voice_worker
88
  global llm_generator, fusion_engine, models_ready
89
 
90
- print("[Backend] πŸš€ Initializing MrrrMe AI models in background...")
91
 
92
  try:
93
- # Import modules
94
  from mrrrme.vision.face_processor import FaceProcessor
95
  from mrrrme.audio.voice_emotion import VoiceEmotionWorker
96
  from mrrrme.audio.whisper_transcription import WhisperTranscriptionWorker
@@ -98,24 +209,14 @@ async def load_models():
98
  from mrrrme.nlp.llm_generator_groq import LLMResponseGenerator
99
  from mrrrme.config import FUSE4
100
 
101
- # Load models
102
- print("[Backend] Loading FaceProcessor...")
103
  face_processor = FaceProcessor()
104
-
105
- print("[Backend] Loading TextSentiment...")
106
  text_analyzer = TextSentimentAnalyzer()
107
-
108
- print("[Backend] Loading Whisper...")
109
  whisper_worker = WhisperTranscriptionWorker(text_analyzer)
110
-
111
- print("[Backend] Loading VoiceEmotion...")
112
  voice_worker = VoiceEmotionWorker(whisper_worker=whisper_worker)
113
 
114
- print("[Backend] Initializing LLM...")
115
  groq_api_key = os.getenv("GROQ_API_KEY", "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")
116
  llm_generator = LLMResponseGenerator(api_key=groq_api_key)
117
 
118
- # Initialize fusion engine
119
  class FusionEngine:
120
  def __init__(self):
121
  self.alpha_face = 0.5
@@ -144,99 +245,188 @@ async def load_models():
144
  import traceback
145
  traceback.print_exc()
146
147
  @app.get("/")
148
  async def root():
149
- """Root endpoint"""
150
- return {
151
- "status": "running",
152
- "models_ready": models_ready,
153
- "message": "MrrrMe AI Backend"
154
- }
155
 
156
  @app.get("/health")
157
  async def health():
158
- """Health check - responds immediately"""
159
- return {
160
- "status": "healthy",
161
- "models_ready": models_ready
162
- }
163
 
164
  @app.websocket("/ws")
165
  async def websocket_endpoint(websocket: WebSocket):
166
  await websocket.accept()
167
  print("[WebSocket] βœ… Client connected!")
168
 
169
- # Wait for models to load if needed
170
- if not models_ready:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
171
  await websocket.send_json({
172
- "type": "status",
173
- "message": "AI models are loading, please wait..."
 
174
  })
175
 
176
- # Wait up to 15 minutes for models
177
- for _ in range(900):
178
- if models_ready:
179
- await websocket.send_json({
180
- "type": "status",
181
- "message": "Models loaded! Ready to chat."
182
- })
183
- break
184
- await asyncio.sleep(1)
185
-
186
  if not models_ready:
187
- await websocket.send_json({
188
- "type": "error",
189
- "message": "Models failed to load. Please refresh."
190
- })
191
- return
192
-
193
- # Session state
194
- audio_buffer = []
195
- user_preferences = {"voice": "female"} # Store user preferences per session
196
-
197
- try:
 
 
 
 
 
198
  while True:
199
  data = await websocket.receive_json()
200
  msg_type = data.get("type")
201
 
202
- # ============ PREFERENCES UPDATE ============
203
  if msg_type == "preferences":
204
  if "voice" in data:
205
- user_preferences["voice"] = data.get("voice", "female")
206
  if "language" in data:
207
- user_preferences["language"] = data.get("language", "en")
208
- print(f"[Preferences] Updated: voice={user_preferences.get('voice')}, language={user_preferences.get('language')}")
209
  continue
210
 
211
- # ============ VIDEO FRAME ============
212
- if msg_type == "video_frame":
213
  try:
214
- # Decode base64 image
215
  img_data = base64.b64decode(data["frame"].split(",")[1])
216
  img = Image.open(io.BytesIO(img_data))
217
  frame = cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)
218
 
219
- # Process face emotion
220
- try:
221
- processed_frame, result = face_processor.process_frame(frame)
222
- face_emotion = face_processor.get_last_emotion() or "Neutral"
223
- face_confidence = face_processor.get_last_confidence() or 0.0
224
- except Exception as proc_err:
225
- print(f"[FaceProcessor] Error: {proc_err}")
226
- face_emotion = "Neutral"
227
- face_confidence = 0.0
228
 
229
- # Send face emotion to frontend
230
  await websocket.send_json({
231
  "type": "face_emotion",
232
  "emotion": face_emotion,
233
  "confidence": face_confidence
234
  })
235
-
236
  except Exception as e:
237
  print(f"[Video] Error: {e}")
238
 
239
- # ============ AUDIO CHUNK ============
240
  elif msg_type == "audio_chunk":
241
  try:
242
  audio_data = base64.b64decode(data["audio"])
@@ -249,29 +439,30 @@ async def websocket_endpoint(websocket: WebSocket):
249
  "emotion": voice_emotion
250
  })
251
  audio_buffer = audio_buffer[-3:]
252
-
253
  except Exception as e:
254
  print(f"[Audio] Error: {e}")
255
 
256
- # ============ USER FINISHED SPEAKING ============
257
  elif msg_type == "speech_end":
258
  transcription = data.get("text", "").strip()
259
- print(f"\n[Speech End] User said: '{transcription}'")
260
 
261
- # Filter short/meaningless transcriptions
262
  if len(transcription) < 2:
263
  continue
264
 
265
- hallucinations = {"thank you", "thanks", "okay", "ok", "you", "yeah", "yep"}
266
- if transcription.lower().strip('.,!?') in hallucinations:
267
- continue
 
 
 
 
 
 
268
 
269
  try:
270
- # Get face emotion
271
  face_emotion = face_processor.get_last_emotion()
272
  face_confidence = face_processor.get_last_confidence()
273
 
274
- # Create emotion probabilities
275
  emotion_map = {'Neutral': 0, 'Happy': 1, 'Sad': 2, 'Angry': 3}
276
  face_probs = np.array([0.25, 0.25, 0.25, 0.25], dtype=np.float32)
277
  if face_emotion in emotion_map:
@@ -279,32 +470,40 @@ async def websocket_endpoint(websocket: WebSocket):
279
  face_probs[face_idx] = face_confidence
280
  face_probs = face_probs / face_probs.sum()
281
 
282
- # Get voice and text emotions
283
  voice_probs, voice_emotion = voice_worker.get_probs()
284
  text_analyzer.analyze(transcription)
285
  text_probs, _ = text_analyzer.get_probs()
286
 
287
- # Fuse emotions
288
  fused_emotion, intensity = fusion_engine.fuse(
289
  face_probs, voice_probs, text_probs
290
  )
291
 
292
- print(f"[Fusion] Face: {face_emotion}, Voice: {voice_emotion}, Fused: {fused_emotion}")
 
 
 
293
 
294
- # Generate LLM response
295
  response_text = llm_generator.generate_response(
296
  fused_emotion, face_emotion, voice_emotion,
297
- transcription, force=True, intensity=intensity
298
  )
299
 
300
- print(f"[LLM] Response: '{response_text}'")
 
 
 
 
 
 
 
 
301
 
302
- # Send to avatar for TTS
303
  try:
304
- voice_preference = user_preferences.get("voice", "female")
305
  avatar_response = requests.post(
306
  f"{AVATAR_API}/speak",
307
- data={"text": response_text, "voice": voice_preference},
308
  timeout=45
309
  )
310
  avatar_response.raise_for_status()
@@ -318,7 +517,6 @@ async def websocket_endpoint(websocket: WebSocket):
318
  "audio_url": avatar_data.get("audio_url"),
319
  "visemes": avatar_data.get("visemes")
320
  })
321
-
322
  except Exception as avatar_err:
323
  print(f"[Avatar] Error: {avatar_err}")
324
  await websocket.send_json({
@@ -328,7 +526,6 @@ async def websocket_endpoint(websocket: WebSocket):
328
  "intensity": intensity,
329
  "error": "Avatar TTS failed"
330
  })
331
-
332
  except Exception as e:
333
  print(f"[Speech Processing] Error: {e}")
334
  import traceback
@@ -336,6 +533,14 @@ async def websocket_endpoint(websocket: WebSocket):
336
 
337
  except WebSocketDisconnect:
338
  print("[WebSocket] ❌ Client disconnected")
 
 
 
 
 
 
 
 
339
  except Exception as e:
340
  print(f"[WebSocket] Error: {e}")
341
  import traceback
 
1
+ """MrrrMe Backend WebSocket Server - With User Authentication & Chat History"""
2
  import os
3
  import sys
4
 
5
+ # Cache setup
6
  os.environ['HF_HOME'] = '/tmp/huggingface'
7
  os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers'
 
 
8
  os.makedirs('/tmp/huggingface', exist_ok=True)
9
  os.makedirs('/tmp/transformers', exist_ok=True)
10
11
  import asyncio
12
  import json
13
  import base64
 
15
  import cv2
16
  import io
17
  import torch
18
+ from fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException, Body
19
  from fastapi.middleware.cors import CORSMiddleware
20
+ from pydantic import BaseModel
21
  import requests
22
  from PIL import Image
23
+ from typing import Optional
24
+ from groq import Groq
25
 
26
+ # Database
27
+ import sqlite3
28
+ import secrets
29
+ import hashlib
30
+ from datetime import datetime
31
 
32
  app = FastAPI()
33
 
 
34
  app.add_middleware(
35
  CORSMiddleware,
36
  allow_origins=["*"],
 
39
  allow_headers=["*"],
40
  )
41
 
42
+ # Models
43
  face_processor = None
44
  text_analyzer = None
45
  whisper_worker = None
 
48
  fusion_engine = None
49
  models_ready = False
50
 
 
51
  AVATAR_API = "http://localhost:8765"
52
+ DB_PATH = "/tmp/mrrrme_users.db"
53
+
54
+ # Auth models
55
+ class SignupRequest(BaseModel):
56
+ username: str
57
+ password: str
58
+
59
+ class LoginRequest(BaseModel):
60
+ username: str
61
+ password: str
62
+
63
+ # Database setup
64
+ def init_db():
65
+ conn = sqlite3.connect(DB_PATH)
66
+ cursor = conn.cursor()
67
+
68
+ cursor.execute("""
69
+ CREATE TABLE IF NOT EXISTS users (
70
+ user_id TEXT PRIMARY KEY,
71
+ username TEXT UNIQUE NOT NULL,
72
+ password_hash TEXT NOT NULL,
73
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
74
+ )
75
+ """)
76
+
77
+ cursor.execute("""
78
+ CREATE TABLE IF NOT EXISTS sessions (
79
+ session_id TEXT PRIMARY KEY,
80
+ user_id TEXT NOT NULL,
81
+ token TEXT UNIQUE NOT NULL,
82
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
83
+ is_active BOOLEAN DEFAULT 1
84
+ )
85
+ """)
86
+
87
+ cursor.execute("""
88
+ CREATE TABLE IF NOT EXISTS messages (
89
+ message_id INTEGER PRIMARY KEY AUTOINCREMENT,
90
+ session_id TEXT NOT NULL,
91
+ role TEXT NOT NULL,
92
+ content TEXT NOT NULL,
93
+ emotion TEXT,
94
+ timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
95
+ )
96
+ """)
97
+
98
+ cursor.execute("""
99
+ CREATE TABLE IF NOT EXISTS user_summaries (
100
+ user_id TEXT PRIMARY KEY,
101
+ summary_text TEXT NOT NULL,
102
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
103
+ )
104
+ """)
105
+
106
+ conn.commit()
107
+ conn.close()
108
+
109
+ init_db()
110
+
111
+ def hash_password(pw: str) -> str:
112
+ return hashlib.sha256(pw.encode()).hexdigest()
113
+
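Note that hash_password above stores a bare, unsalted SHA-256 digest, so identical passwords produce identical hashes. A salted key-derivation function is the usual hardening step; the pair below is a possible drop-in, not part of this commit, and the iteration count is chosen arbitrarily:

import hashlib, hmac, os

def hash_password_salted(pw: str) -> str:
    # random 16-byte salt + PBKDF2-HMAC-SHA256, stored as "salt$digest" in hex
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac("sha256", pw.encode(), salt, 100_000)
    return salt.hex() + "$" + digest.hex()

def verify_password(pw: str, stored: str) -> bool:
    salt_hex, digest_hex = stored.split("$", 1)
    candidate = hashlib.pbkdf2_hmac("sha256", pw.encode(), bytes.fromhex(salt_hex), 100_000)
    return hmac.compare_digest(candidate.hex(), digest_hex)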
114
+ # Authentication endpoints
115
+ @app.post("/api/signup")
116
+ async def signup(req: SignupRequest):
117
+ conn = sqlite3.connect(DB_PATH)
118
+ cursor = conn.cursor()
119
+
120
+ try:
121
+ user_id = secrets.token_urlsafe(16)
122
+ cursor.execute(
123
+ "INSERT INTO users (user_id, username, password_hash) VALUES (?, ?, ?)",
124
+ (user_id, req.username, hash_password(req.password))
125
+ )
126
+ conn.commit()
127
+ conn.close()
128
+ return {"success": True, "message": "Account created!"}
129
+ except sqlite3.IntegrityError:
130
+ conn.close()
131
+ raise HTTPException(status_code=400, detail="Username already exists")
132
+
133
+ @app.post("/api/login")
134
+ async def login(req: LoginRequest):
135
+ conn = sqlite3.connect(DB_PATH)
136
+ cursor = conn.cursor()
137
+
138
+ cursor.execute(
139
+ "SELECT user_id, username FROM users WHERE username = ? AND password_hash = ?",
140
+ (req.username, hash_password(req.password))
141
+ )
142
+
143
+ result = cursor.fetchone()
144
+
145
+ if not result:
146
+ conn.close()
147
+ raise HTTPException(status_code=401, detail="Invalid credentials")
148
+
149
+ user_id, username = result
150
+
151
+ # Create session
152
+ session_id = secrets.token_urlsafe(16)
153
+ token = secrets.token_urlsafe(32)
154
+
155
+ cursor.execute(
156
+ "INSERT INTO sessions (session_id, user_id, token) VALUES (?, ?, ?)",
157
+ (session_id, user_id, token)
158
+ )
159
+
160
+ # Get user summary
161
+ cursor.execute(
162
+ "SELECT summary_text FROM user_summaries WHERE user_id = ?",
163
+ (user_id,)
164
+ )
165
+ summary_row = cursor.fetchone()
166
+ summary = summary_row[0] if summary_row else None
167
+
168
+ conn.commit()
169
+ conn.close()
170
+
171
+ return {
172
+ "success": True,
173
+ "token": token,
174
+ "username": username,
175
+ "user_id": user_id,
176
+ "summary": summary
177
+ }
178
+
179
+ @app.post("/api/logout")
180
+ async def logout(token: str):
181
+ conn = sqlite3.connect(DB_PATH)
182
+ cursor = conn.cursor()
183
+
184
+ cursor.execute(
185
+ "UPDATE sessions SET is_active = 0 WHERE token = ?",
186
+ (token,)
187
+ )
188
+
189
+ conn.commit()
190
+ conn.close()
191
+
192
+ return {"success": True}
193
 
194
  @app.on_event("startup")
195
  async def startup_event():
 
196
  asyncio.create_task(load_models())
197
 
198
  async def load_models():
 
199
  global face_processor, text_analyzer, whisper_worker, voice_worker
200
  global llm_generator, fusion_engine, models_ready
201
 
202
+ print("[Backend] πŸš€ Loading models...")
203
 
204
  try:
 
205
  from mrrrme.vision.face_processor import FaceProcessor
206
  from mrrrme.audio.voice_emotion import VoiceEmotionWorker
207
  from mrrrme.audio.whisper_transcription import WhisperTranscriptionWorker
 
209
  from mrrrme.nlp.llm_generator_groq import LLMResponseGenerator
210
  from mrrrme.config import FUSE4
211
 
 
 
212
  face_processor = FaceProcessor()
 
 
213
  text_analyzer = TextSentimentAnalyzer()
 
 
214
  whisper_worker = WhisperTranscriptionWorker(text_analyzer)
 
 
215
  voice_worker = VoiceEmotionWorker(whisper_worker=whisper_worker)
216
 
 
217
  groq_api_key = os.getenv("GROQ_API_KEY", "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")
218
  llm_generator = LLMResponseGenerator(api_key=groq_api_key)
219
 
 
220
  class FusionEngine:
221
  def __init__(self):
222
  self.alpha_face = 0.5
 
245
  import traceback
246
  traceback.print_exc()
247
 
248
+ async def generate_session_summary(session_id: str, user_id: str):
249
+ """Generate AI summary of conversation"""
250
+ conn = sqlite3.connect(DB_PATH)
251
+ cursor = conn.cursor()
252
+
253
+ cursor.execute(
254
+ "SELECT role, content, emotion FROM messages WHERE session_id = ? ORDER BY timestamp ASC",
255
+ (session_id,)
256
+ )
257
+
258
+ messages = cursor.fetchall()
259
+ conn.close()
260
+
261
+ if len(messages) < 3:
262
+ return None
263
+
264
+ # Build conversation text
265
+ conversation = ""
266
+ for role, content, emotion in messages:
267
+ speaker = "User" if role == "user" else "AI"
268
+ emo_tag = f" [{emotion}]" if emotion else ""
269
+ conversation += f"{speaker}{emo_tag}: {content}\n"
270
+
271
+ # Generate summary with Groq
272
+ try:
273
+ groq_client = Groq(api_key=os.getenv("GROQ_API_KEY"))
274
+
275
+ prompt = f"""You are analyzing a conversation between a user and an AI emotion coach. Create a concise summary of the user to help the AI remember them in future conversations.
276
+
277
+ Conversation:
278
+ {conversation}
279
+
280
+ Create a 2-3 sentence summary covering:
281
+ - Key topics discussed
282
+ - User's emotional state/patterns
283
+ - Important personal details mentioned
284
+ - Preferences or needs
285
+
286
+ Keep it natural and helpful for continuing the relationship."""
287
+
288
+ response = groq_client.chat.completions.create(
289
+ model="llama-3.1-8b-instant",
290
+ messages=[{"role": "user", "content": prompt}],
291
+ max_tokens=150,
292
+ temperature=0.7
293
+ )
294
+
295
+ summary = response.choices[0].message.content.strip()
296
+
297
+ # Save summary
298
+ conn = sqlite3.connect(DB_PATH)
299
+ cursor = conn.cursor()
300
+
301
+ cursor.execute(
302
+ "INSERT OR REPLACE INTO user_summaries (user_id, summary_text, updated_at) VALUES (?, ?, ?)",
303
+ (user_id, summary, datetime.now())
304
+ )
305
+
306
+ conn.commit()
307
+ conn.close()
308
+
309
+ print(f"[Summary] βœ… Generated for user {user_id}: {summary[:60]}...")
310
+ return summary
311
+
312
+ except Exception as e:
313
+ print(f"[Summary] ❌ Error: {e}")
314
+ return None
315
+
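generate_session_summary runs when a client disconnects: it pulls the session's rows from SQLite, asks Groq's llama-3.1-8b-instant for a 2-3 sentence profile, and upserts the result into user_summaries. A rough standalone invocation for testing, assuming the module imports as mrrrme.backend_server, a populated /tmp/mrrrme_users.db, and GROQ_API_KEY set in the environment:

import asyncio
from mrrrme import backend_server  # assumed import path

async def main() -> None:
    # placeholder ids; use a session_id/user_id that actually exist in the DB
    summary = await backend_server.generate_session_summary("some-session-id", "some-user-id")
    print(summary or "no summary (fewer than 3 messages, or the Groq call failed)")

asyncio.run(main())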
316
  @app.get("/")
317
  async def root():
318
+ return {"status": "running", "models_ready": models_ready}
 
 
 
 
 
319
 
320
  @app.get("/health")
321
  async def health():
322
+ return {"status": "healthy", "models_ready": models_ready}
 
 
 
 
323
 
324
  @app.websocket("/ws")
325
  async def websocket_endpoint(websocket: WebSocket):
326
  await websocket.accept()
327
  print("[WebSocket] βœ… Client connected!")
328
 
329
+ # Wait for auth
330
+ session_data = None
331
+ user_summary = None
332
+
333
+ try:
334
+ auth_msg = await websocket.receive_json()
335
+
336
+ if auth_msg.get("type") != "auth":
337
+ await websocket.send_json({"type": "error", "message": "Authentication required"})
338
+ return
339
+
340
+ token = auth_msg.get("token")
341
+
342
+ # Validate token
343
+ conn = sqlite3.connect(DB_PATH)
344
+ cursor = conn.cursor()
345
+
346
+ cursor.execute(
347
+ "SELECT s.session_id, s.user_id, u.username FROM sessions s JOIN users u ON s.user_id = u.user_id WHERE s.token = ? AND s.is_active = 1",
348
+ (token,)
349
+ )
350
+
351
+ result = cursor.fetchone()
352
+
353
+ if not result:
354
+ await websocket.send_json({"type": "error", "message": "Invalid session"})
355
+ conn.close()
356
+ return
357
+
358
+ session_id, user_id, username = result
359
+
360
+ # Get user summary
361
+ cursor.execute(
362
+ "SELECT summary_text FROM user_summaries WHERE user_id = ?",
363
+ (user_id,)
364
+ )
365
+ summary_row = cursor.fetchone()
366
+ user_summary = summary_row[0] if summary_row else None
367
+
368
+ conn.close()
369
+
370
+ session_data = {
371
+ 'session_id': session_id,
372
+ 'user_id': user_id,
373
+ 'username': username
374
+ }
375
+
376
+ # Send welcome
377
  await websocket.send_json({
378
+ "type": "authenticated",
379
+ "username": username,
380
+ "summary": user_summary
381
  })
382
 
383
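The /ws endpoint now expects the very first frame to be {"type": "auth", "token": ...} and replies with an "authenticated" message (or an error) before anything else. A minimal sketch of that handshake from a Python client, assuming the `websockets` package and a token obtained from /api/login:

import asyncio, json
import websockets  # pip install websockets

async def handshake(token: str) -> None:
    async with websockets.connect("ws://localhost:8000/ws") as ws:  # assumed URL
        await ws.send(json.dumps({"type": "auth", "token": token}))
        reply = json.loads(await ws.recv())
        print(reply)  # expected: {"type": "authenticated", "username": ..., "summary": ...}

asyncio.run(handshake("token-from-/api/login"))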
+ # Wait for models
 
 
 
 
 
 
 
 
 
384
  if not models_ready:
385
+ await websocket.send_json({"type": "status", "message": "Loading AI models..."})
386
+
387
+ for _ in range(900):
388
+ if models_ready:
389
+ await websocket.send_json({"type": "status", "message": "Ready!"})
390
+ break
391
+ await asyncio.sleep(1)
392
+
393
+ if not models_ready:
394
+ await websocket.send_json({"type": "error", "message": "Models failed to load"})
395
+ return
396
+
397
+ # Main loop
398
+ audio_buffer = []
399
+ user_preferences = {"voice": "female", "language": "en"}
400
+
401
  while True:
402
  data = await websocket.receive_json()
403
  msg_type = data.get("type")
404
 
 
405
  if msg_type == "preferences":
406
  if "voice" in data:
407
+ user_preferences["voice"] = data["voice"]
408
  if "language" in data:
409
+ user_preferences["language"] = data["language"]
 
410
  continue
411
 
412
+ elif msg_type == "video_frame":
 
413
  try:
 
414
  img_data = base64.b64decode(data["frame"].split(",")[1])
415
  img = Image.open(io.BytesIO(img_data))
416
  frame = cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)
417
 
418
+ processed_frame, result = face_processor.process_frame(frame)
419
+ face_emotion = face_processor.get_last_emotion() or "Neutral"
420
+ face_confidence = face_processor.get_last_confidence() or 0.0
 
 
 
 
 
 
421
 
 
422
  await websocket.send_json({
423
  "type": "face_emotion",
424
  "emotion": face_emotion,
425
  "confidence": face_confidence
426
  })
 
427
  except Exception as e:
428
  print(f"[Video] Error: {e}")
429
 
 
430
  elif msg_type == "audio_chunk":
431
  try:
432
  audio_data = base64.b64decode(data["audio"])
 
439
  "emotion": voice_emotion
440
  })
441
  audio_buffer = audio_buffer[-3:]
 
442
  except Exception as e:
443
  print(f"[Audio] Error: {e}")
444
 
 
445
  elif msg_type == "speech_end":
446
  transcription = data.get("text", "").strip()
 
447
 
 
448
  if len(transcription) < 2:
449
  continue
450
 
451
+ # Save user message
452
+ conn = sqlite3.connect(DB_PATH)
453
+ cursor = conn.cursor()
454
+ cursor.execute(
455
+ "INSERT INTO messages (session_id, role, content) VALUES (?, ?, ?)",
456
+ (session_id, "user", transcription)
457
+ )
458
+ conn.commit()
459
+ conn.close()
460
 
461
  try:
462
+ # Get emotions
463
  face_emotion = face_processor.get_last_emotion()
464
  face_confidence = face_processor.get_last_confidence()
465
 
 
466
  emotion_map = {'Neutral': 0, 'Happy': 1, 'Sad': 2, 'Angry': 3}
467
  face_probs = np.array([0.25, 0.25, 0.25, 0.25], dtype=np.float32)
468
  if face_emotion in emotion_map:
 
470
  face_probs[face_idx] = face_confidence
471
  face_probs = face_probs / face_probs.sum()
472
 
 
473
  voice_probs, voice_emotion = voice_worker.get_probs()
474
  text_analyzer.analyze(transcription)
475
  text_probs, _ = text_analyzer.get_probs()
476
 
 
477
  fused_emotion, intensity = fusion_engine.fuse(
478
  face_probs, voice_probs, text_probs
479
  )
480
 
481
+ # Add user summary context if available
482
+ context_prefix = ""
483
+ if user_summary:
484
+ context_prefix = f"[User context: {user_summary}]\n\n"
485
 
 
486
  response_text = llm_generator.generate_response(
487
  fused_emotion, face_emotion, voice_emotion,
488
+ context_prefix + transcription, force=True, intensity=intensity
489
  )
490
 
491
+ # Save assistant message
492
+ conn = sqlite3.connect(DB_PATH)
493
+ cursor = conn.cursor()
494
+ cursor.execute(
495
+ "INSERT INTO messages (session_id, role, content, emotion) VALUES (?, ?, ?, ?)",
496
+ (session_id, "assistant", response_text, fused_emotion)
497
+ )
498
+ conn.commit()
499
+ conn.close()
500
 
501
+ # Send to avatar
502
  try:
503
+ voice_pref = user_preferences.get("voice", "female")
504
  avatar_response = requests.post(
505
  f"{AVATAR_API}/speak",
506
+ data={"text": response_text, "voice": voice_pref},
507
  timeout=45
508
  )
509
  avatar_response.raise_for_status()
 
517
  "audio_url": avatar_data.get("audio_url"),
518
  "visemes": avatar_data.get("visemes")
519
  })
 
520
  except Exception as avatar_err:
521
  print(f"[Avatar] Error: {avatar_err}")
522
  await websocket.send_json({
 
526
  "intensity": intensity,
527
  "error": "Avatar TTS failed"
528
  })
 
529
  except Exception as e:
530
  print(f"[Speech Processing] Error: {e}")
531
  import traceback
 
533
 
534
  except WebSocketDisconnect:
535
  print("[WebSocket] ❌ Client disconnected")
536
+
537
+ if session_data:
538
+ # Generate summary
539
+ print(f"[WebSocket] πŸ“ Generating summary for {session_data['username']}...")
540
+ summary = await generate_session_summary(session_data['session_id'], session_data['user_id'])
541
+ if summary:
542
+ print(f"[WebSocket] βœ… Summary saved!")
543
+
544
  except Exception as e:
545
  print(f"[WebSocket] Error: {e}")
546
  import traceback
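For reference, after the auth frame the /ws loop dispatches on the "type" field; the client-to-server frames it understands look roughly like this (values are illustrative, not captured from a real session):

# Illustrative shapes of the frames handled by the /ws loop above.
frames = [
    {"type": "auth", "token": "<token from /api/login>"},
    {"type": "preferences", "voice": "female", "language": "en"},
    {"type": "video_frame", "frame": "data:image/jpeg;base64,<...>"},  # data URL; server splits off the prefix
    {"type": "audio_chunk", "audio": "<base64-encoded audio chunk>"},
    {"type": "speech_end", "text": "what the user just said"},
]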
mrrrme/database/__init__.py ADDED
File without changes
mrrrme/database/db_manager.py ADDED
@@ -0,0 +1,333 @@
1
+ """Database manager for user sessions, chat history, and summaries"""
2
+ import sqlite3
3
+ import json
4
+ import hashlib
5
+ import secrets
6
+ from datetime import datetime, timedelta
7
+ from pathlib import Path
8
+ from typing import Optional, List, Dict
9
+
10
+ DB_PATH = Path("/tmp/mrrrme_users.db")
11
+
12
+
13
+ class DatabaseManager:
14
+ """Manages user authentication, chat history, and AI-generated summaries"""
15
+
16
+ def __init__(self, db_path: str = str(DB_PATH)):
17
+ self.db_path = db_path
18
+ self._init_database()
19
+ print(f"[Database] βœ… Initialized at {db_path}")
20
+
21
+ def _init_database(self):
22
+ """Create tables if they don't exist"""
23
+ conn = sqlite3.connect(self.db_path)
24
+ cursor = conn.cursor()
25
+
26
+ # Users table
27
+ cursor.execute("""
28
+ CREATE TABLE IF NOT EXISTS users (
29
+ user_id TEXT PRIMARY KEY,
30
+ username TEXT UNIQUE NOT NULL,
31
+ password_hash TEXT NOT NULL,
32
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
33
+ last_login TIMESTAMP,
34
+ total_sessions INTEGER DEFAULT 0
35
+ )
36
+ """)
37
+
38
+ # Sessions table
39
+ cursor.execute("""
40
+ CREATE TABLE IF NOT EXISTS sessions (
41
+ session_id TEXT PRIMARY KEY,
42
+ user_id TEXT NOT NULL,
43
+ token TEXT UNIQUE NOT NULL,
44
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
45
+ last_activity TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
46
+ is_active BOOLEAN DEFAULT 1,
47
+ FOREIGN KEY (user_id) REFERENCES users(user_id)
48
+ )
49
+ """)
50
+
51
+ # Messages table
52
+ cursor.execute("""
53
+ CREATE TABLE IF NOT EXISTS messages (
54
+ message_id INTEGER PRIMARY KEY AUTOINCREMENT,
55
+ session_id TEXT NOT NULL,
56
+ user_id TEXT NOT NULL,
57
+ role TEXT NOT NULL,
58
+ content TEXT NOT NULL,
59
+ emotion TEXT,
60
+ intensity REAL,
61
+ timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
62
+ FOREIGN KEY (session_id) REFERENCES sessions(session_id),
63
+ FOREIGN KEY (user_id) REFERENCES users(user_id)
64
+ )
65
+ """)
66
+
67
+ # Summaries table (AI-generated user profiles)
68
+ cursor.execute("""
69
+ CREATE TABLE IF NOT EXISTS user_summaries (
70
+ summary_id INTEGER PRIMARY KEY AUTOINCREMENT,
71
+ user_id TEXT NOT NULL,
72
+ session_id TEXT,
73
+ summary_text TEXT NOT NULL,
74
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
75
+ message_count INTEGER,
76
+ FOREIGN KEY (user_id) REFERENCES users(user_id),
77
+ FOREIGN KEY (session_id) REFERENCES sessions(session_id)
78
+ )
79
+ """)
80
+
81
+ conn.commit()
82
+ conn.close()
83
+
84
+ def _hash_password(self, password: str) -> str:
85
+ """Hash password with salt"""
86
+ return hashlib.sha256(password.encode()).hexdigest()
87
+
+     def create_user(self, username: str, password: str) -> Optional[str]:
+         """Create new user, returns user_id"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             user_id = secrets.token_urlsafe(16)
+             password_hash = self._hash_password(password)
+
+             cursor.execute(
+                 "INSERT INTO users (user_id, username, password_hash) VALUES (?, ?, ?)",
+                 (user_id, username, password_hash)
+             )
+
+             conn.commit()
+             conn.close()
+
+             print(f"[Database] βœ… Created user: {username}")
+             return user_id
+         except sqlite3.IntegrityError:
+             return None
+         except Exception as e:
+             print(f"[Database] ❌ Error creating user: {e}")
+             return None
+
+     def authenticate_user(self, username: str, password: str) -> Optional[Dict]:
+         """Authenticate user and return user info"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         password_hash = self._hash_password(password)
+
+         cursor.execute(
+             "SELECT user_id, username FROM users WHERE username = ? AND password_hash = ?",
+             (username, password_hash)
+         )
+
+         result = cursor.fetchone()
+
+         if result:
+             user_id, username = result
+
+             # Update last login
+             cursor.execute(
+                 "UPDATE users SET last_login = ?, total_sessions = total_sessions + 1 WHERE user_id = ?",
+                 (datetime.now(), user_id)
+             )
+             conn.commit()
+
+             conn.close()
+             return {'user_id': user_id, 'username': username}
+
+         conn.close()
+         return None
+
+     def create_session(self, user_id: str) -> Optional[Dict]:
+         """Create new session for user"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             session_id = secrets.token_urlsafe(16)
+             token = secrets.token_urlsafe(32)
+
+             cursor.execute(
+                 "INSERT INTO sessions (session_id, user_id, token) VALUES (?, ?, ?)",
+                 (session_id, user_id, token)
+             )
+
+             conn.commit()
+             conn.close()
+
+             print(f"[Database] βœ… Created session for user {user_id}")
+             return {
+                 'session_id': session_id,
+                 'token': token,
+                 'user_id': user_id
+             }
+         except Exception as e:
+             print(f"[Database] ❌ Error creating session: {e}")
+             return None
+
+     def validate_token(self, token: str) -> Optional[Dict]:
+         """Validate session token and return session info"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         cursor.execute(
+             """
+             SELECT s.session_id, s.user_id, u.username
+             FROM sessions s
+             JOIN users u ON s.user_id = u.user_id
+             WHERE s.token = ? AND s.is_active = 1
+             """,
+             (token,)
+         )
+
+         result = cursor.fetchone()
+
+         if result:
+             session_id, user_id, username = result
+
+             # Update last activity
+             cursor.execute(
+                 "UPDATE sessions SET last_activity = ? WHERE session_id = ?",
+                 (datetime.now(), session_id)
+             )
+             conn.commit()
+
+             conn.close()
+             return {
+                 'session_id': session_id,
+                 'user_id': user_id,
+                 'username': username
+             }
+
+         conn.close()
+         return None
+
+     def add_message(self, session_id: str, user_id: str, role: str, content: str,
+                     emotion: Optional[str] = None, intensity: Optional[float] = None):
+         """Add message to chat history"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             cursor.execute(
+                 """
+                 INSERT INTO messages (session_id, user_id, role, content, emotion, intensity)
+                 VALUES (?, ?, ?, ?, ?, ?)
+                 """,
+                 (session_id, user_id, role, content, emotion, intensity)
+             )
+
+             conn.commit()
+             conn.close()
+         except Exception as e:
+             print(f"[Database] ❌ Error adding message: {e}")
+
+     def get_session_messages(self, session_id: str) -> List[Dict]:
+         """Get all messages for a session"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         cursor.execute(
+             """
+             SELECT role, content, emotion, intensity, timestamp
+             FROM messages
+             WHERE session_id = ?
+             ORDER BY timestamp ASC
+             """,
+             (session_id,)
+         )
+
+         messages = []
+         for row in cursor.fetchall():
+             messages.append({
+                 'role': row[0],
+                 'content': row[1],
+                 'emotion': row[2],
+                 'intensity': row[3],
+                 'timestamp': row[4]
+             })
+
+         conn.close()
+         return messages
+
+     def get_user_summary(self, user_id: str) -> Optional[str]:
+         """Get most recent summary for user"""
+         conn = sqlite3.connect(self.db_path)
+         cursor = conn.cursor()
+
+         cursor.execute(
+             """
+             SELECT summary_text
+             FROM user_summaries
+             WHERE user_id = ?
+             ORDER BY created_at DESC
+             LIMIT 1
+             """,
+             (user_id,)
+         )
+
+         result = cursor.fetchone()
+         conn.close()
+
+         return result[0] if result else None
+
+     def add_summary(self, user_id: str, session_id: str, summary_text: str, message_count: int):
+         """Add AI-generated summary"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             cursor.execute(
+                 """
+                 INSERT INTO user_summaries (user_id, session_id, summary_text, message_count)
+                 VALUES (?, ?, ?, ?)
+                 """,
+                 (user_id, session_id, summary_text, message_count)
+             )
+
+             conn.commit()
+             conn.close()
+
+             print(f"[Database] βœ… Saved summary for user {user_id} ({message_count} messages)")
+         except Exception as e:
+             print(f"[Database] ❌ Error saving summary: {e}")
+
+     def close_session(self, session_id: str):
+         """Mark session as inactive"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             cursor.execute(
+                 "UPDATE sessions SET is_active = 0 WHERE session_id = ?",
+                 (session_id,)
+             )
+
+             conn.commit()
+             conn.close()
+         except Exception as e:
+             print(f"[Database] ❌ Error closing session: {e}")
+
+     def cleanup_old_sessions(self, days: int = 30):
+         """Clean up sessions older than X days"""
+         try:
+             conn = sqlite3.connect(self.db_path)
+             cursor = conn.cursor()
+
+             cutoff = datetime.now() - timedelta(days=days)
+
+             cursor.execute(
+                 "DELETE FROM sessions WHERE last_activity < ? AND is_active = 0",
+                 (cutoff,)
+             )
+
+             deleted = cursor.rowcount
+             conn.commit()
+             conn.close()
+
+             if deleted > 0:
+                 print(f"[Database] πŸ—‘οΈ Cleaned up {deleted} old sessions")
+         except Exception as e:
+             print(f"[Database] ❌ Error cleaning sessions: {e}")
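For orientation, here is a minimal sketch of how a backend caller might exercise DatabaseManager end to end (registration, login, token validation, chat logging, and summaries). It is not part of the commit; the username, password, message text, and summary string below are illustrative placeholders only:

    from mrrrme.database.db_manager import DatabaseManager

    db = DatabaseManager()  # defaults to /tmp/mrrrme_users.db

    # Register a user (create_user returns None if the username is taken), then log in.
    if db.create_user("alice", "s3cret") is None:
        print("username already taken")
    user = db.authenticate_user("alice", "s3cret")   # {'user_id': ..., 'username': 'alice'} or None

    # Start a session and hand the token to the client.
    session = db.create_session(user["user_id"])     # {'session_id', 'token', 'user_id'}

    # On each incoming message, the token identifies the session.
    info = db.validate_token(session["token"])       # None if the token is invalid or inactive

    # Log both sides of the conversation, with optional emotion metadata.
    db.add_message(info["session_id"], info["user_id"], "user", "Hi there", emotion="happy", intensity=0.8)
    db.add_message(info["session_id"], info["user_id"], "assistant", "Hello! How are you feeling today?")

    # Persist an AI-generated profile summary and fetch the latest one at the next login.
    db.add_summary(info["user_id"], info["session_id"], "Prefers short, upbeat replies.", message_count=2)
    print(db.get_user_summary(info["user_id"]))

    # Wrap up.
    db.close_session(info["session_id"])
    db.cleanup_old_sessions(days=30)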
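One caveat worth flagging: _hash_password stores a single unsalted SHA-256 digest, so identical passwords produce identical hashes. If that is tightened later, a salted PBKDF2 variant could look like the sketch below. This is an assumption about a possible follow-up, not code from the commit; it changes the stored hash format, so existing password_hash rows would need migrating, and the function names here are hypothetical:

    import hashlib
    import secrets
    from typing import Optional

    def hash_password(password: str, salt: Optional[str] = None) -> str:
        """PBKDF2-HMAC-SHA256 with a random per-user salt, stored as 'salt$hash'."""
        salt = salt or secrets.token_hex(16)
        digest = hashlib.pbkdf2_hmac("sha256", password.encode(), bytes.fromhex(salt), 100_000)
        return f"{salt}${digest.hex()}"

    def verify_password(password: str, stored: str) -> bool:
        """Constant-time comparison against the stored 'salt$hash' value."""
        salt, _ = stored.split("$", 1)
        return secrets.compare_digest(hash_password(password, salt), stored)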