-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathApp.tsx
More file actions
1830 lines (1620 loc) · 66.5 KB
/
App.tsx
File metadata and controls
1830 lines (1620 loc) · 66.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import React, { useState, useEffect, useCallback, useRef } from 'react';
import { AppState, Message, ScenarioMode, Scenario, AudioData, Character, TefAdMode, TefQuestioningMode, TefReview } from './types';
import { useAudio } from './hooks/useAudio';
import { useDocumentHead } from './hooks/useDocumentHead';
import { useConversationTimer } from './hooks/useConversationTimer';
import { initializeSession, sendVoiceMessage, resetSession, setScenario, transcribeAndCleanupAudio, generateCharacterSpeech, PIPELINE_MAX_MS } from './services/geminiService';
import { processScenarioDescriptionOpenAI } from './services/openaiService';
import { clearHistory } from './services/conversationHistory';
import { hasApiKeyOrEnv } from './services/apiKeyService';
import { assignVoicesToCharacters } from './services/voiceService';
import { generateId, generateTefAdSystemInstruction, generateTefQuestioningSystemInstruction } from './services/scenarioService';
import { generateTefReview } from './services/tefReviewService';
import { Orb } from './components/Orb';
import { Controls } from './components/Controls';
import { ConversationHistory } from './components/ConversationHistory';
import { ScenarioSetup } from './components/ScenarioSetup';
import { AdPersuasionSetup } from './components/AdPersuasionSetup';
import { AdQuestioningSetup } from './components/AdQuestioningSetup';
import { PersuasionTimer } from './components/PersuasionTimer';
import { QuestioningTimer } from './components/QuestioningTimer';
import { TefQuestioningSummary } from './components/TefQuestioningSummary';
import { TefAdSummary } from './components/TefAdSummary';
import { AdThumbnail } from './components/AdThumbnail';
import { ImageLightbox } from './components/ImageLightbox';
import { ApiKeySetup } from './components/ApiKeySetup';
import { GearIcon } from './components/icons/GearIcon';
import { ConversationHint } from './components/ConversationHint';
import { PracticeModeSheet } from './components/PracticeModeSheet';
import { combineAbortSignals } from './utils/combineAbortSignals';
import { isAbortLikeError } from './utils/isAbortLikeError';
const App: React.FC = () => {
// SEO metadata - similar to Next.js metadata export
useDocumentHead({
title: 'Parle - Practice Speaking French with AI',
description: 'Practice French conversation with AI using voice interaction. Learn real-world French with personalized scenarios and receive instant feedback.',
ogTitle: 'Parle - Practice Speaking French with AI',
ogDescription: 'Practice French conversation with AI using voice interaction. Learn real-world French with personalized scenarios and receive instant feedback.',
ogType: 'website',
});
const [appState, setAppState] = useState<AppState>(AppState.IDLE);
const [playbackSpeed, setPlaybackSpeed] = useState(1.0);
const [hasStarted, setHasStarted] = useState(false);
const [messages, setMessages] = useState<Message[]>([]);
const messagesRef = useRef<Message[]>(messages);
messagesRef.current = messages;
const [autoPlayMessageId, setAutoPlayMessageId] = useState<number | null>(null);
const [currentHint, setCurrentHint] = useState<string | null>(null);
// Scenario mode state
const [scenarioMode, setScenarioMode] = useState<ScenarioMode>('none');
const [activeScenario, setActiveScenario] = useState<Scenario | null>(null);
const [scenarioDescription, setScenarioDescription] = useState('');
const [scenarioName, setScenarioName] = useState('');
const [aiSummary, setAiSummary] = useState<string | null>(null);
const [isProcessingScenario, setIsProcessingScenario] = useState(false);
const [isRecordingDescription, setIsRecordingDescription] = useState(false);
const [isTranscribingDescription, setIsTranscribingDescription] = useState(false);
const [showTranscriptOptions, setShowTranscriptOptions] = useState(false);
const [rawTranscript, setRawTranscript] = useState<string | null>(null);
const [cleanedTranscript, setCleanedTranscript] = useState<string | null>(null);
const [scenarioCharacters, setScenarioCharacters] = useState<Character[]>([]); // NEW: Characters for scenario
// TEF Ad state
const [tefAdMode, setTefAdMode] = useState<TefAdMode>('none');
const [tefAdImage, setTefAdImage] = useState<string | null>(null);
const [showLightbox, setShowLightbox] = useState(false);
const [tefTimedUp, setTefTimedUp] = useState(false);
const [tefAdTurnCount, setTefAdTurnCount] = useState(0);
const [tefAdIsFirstMessage, setTefAdIsFirstMessage] = useState(true);
// TEF Ad conversation timer
const { elapsed: tefElapsed, reset: resetTefTimer } = useConversationTimer(
appState,
tefAdMode === 'practice',
() => setTefTimedUp(true)
);
// TEF Questioning state
const [tefQuestioningMode, setTefQuestioningMode] = useState<TefQuestioningMode>('none');
const [tefQuestioningImage, setTefQuestioningImage] = useState<string | null>(null);
const [tefQuestioningTimedUp, setTefQuestioningTimedUp] = useState(false);
const [tefQuestioningQuestionCount, setTefQuestioningQuestionCount] = useState(0);
const [tefQuestioningRepeatCount, setTefQuestioningRepeatCount] = useState(0);
const [tefQuestioningIsFirstMessage, setTefQuestioningIsFirstMessage] = useState(true);
const [showTefQuestioningSummary, setShowTefQuestioningSummary] = useState(false);
// Review state — arrays to support carousel/regenerate
const [tefQuestioningReviews, setTefQuestioningReviews] = useState<TefReview[]>([]);
const [tefQuestioningReviewIndex, setTefQuestioningReviewIndex] = useState(0);
const [tefQuestioningReviewLoading, setTefQuestioningReviewLoading] = useState(false);
const [tefQuestioningReviewError, setTefQuestioningReviewError] = useState<string | null>(null);
const [showTefAdSummary, setShowTefAdSummary] = useState(false);
const [tefAdReviews, setTefAdReviews] = useState<TefReview[]>([]);
const [tefAdReviewIndex, setTefAdReviewIndex] = useState(0);
const [tefAdReviewLoading, setTefAdReviewLoading] = useState(false);
const [tefAdReviewError, setTefAdReviewError] = useState<string | null>(null);
// Snapshot refs for retry/regenerate after messages state is cleared
const tefQuestioningMessagesSnapshotRef = useRef<Message[]>([]);
const tefAdMessagesSnapshotRef = useRef<Message[]>([]);
// Refs to keep handleTefQuestioningTimeUp (useCallback with []) in sync with current state.
// Sync assignments (.current = ...) happen after tefQuestioningElapsed is declared below.
const tefQuestioningElapsedRef = useRef(0);
const activeScenarioRef = useRef<Scenario | null>(null);
activeScenarioRef.current = activeScenario;
// AbortController for cancelling in-flight requests (declared here so timer callback can use it)
const abortControllerRef = useRef<AbortController | null>(null);
// ---------------------------------------------------------------------------
// TEF Questioning review helpers
// ---------------------------------------------------------------------------
/**
 * Kicks off the first AI review of a finished TEF questioning session.
 * Resets the review carousel, then stores the single generated result
 * (or the failure message). Reads scenario/elapsed state through refs
 * because this may be invoked from a memoized timer callback whose
 * closure state could be stale.
 */
const startTefQuestioningReview = (snapshot: Message[]) => {
  setTefQuestioningReviews([]);
  setTefQuestioningReviewIndex(0);
  setTefQuestioningReviewError(null);
  setTefQuestioningReviewLoading(true);
  generateTefReview({
    exerciseType: 'questioning',
    messages: snapshot,
    adSummary: activeScenarioRef.current?.aiSummary,
    elapsedSeconds: tefQuestioningElapsedRef.current,
  })
    .then((review) => {
      if (!review) return;
      setTefQuestioningReviews([review]);
      setTefQuestioningReviewIndex(0);
    })
    .catch((err) => {
      setTefQuestioningReviewError(err instanceof Error ? err.message : 'Review failed');
    })
    .finally(() => {
      setTefQuestioningReviewLoading(false);
    });
};
/**
 * Generates an additional review for the same questioning session and
 * appends it to the carousel, jumping the carousel to the newest entry.
 * Unlike startTefQuestioningReview, existing reviews are preserved.
 */
const regenerateTefQuestioningReview = (snapshot: Message[]) => {
  // Read through the ref so a stale closure (e.g. the timer callback)
  // still sees the current scenario.
  const adSummary = activeScenarioRef.current?.aiSummary;
  setTefQuestioningReviewError(null);
  setTefQuestioningReviewLoading(true);
  generateTefReview({
    exerciseType: 'questioning',
    messages: snapshot,
    adSummary,
    elapsedSeconds: tefQuestioningElapsedRef.current,
  })
    .then((r) => {
      if (r) {
        setTefQuestioningReviews((prev) => {
          const next = [...prev, r];
          // NOTE(review): calling another setState inside this updater makes
          // it impure; StrictMode may invoke updaters twice. The call is
          // idempotent here (same index both times), but consider moving the
          // index update outside the updater.
          setTefQuestioningReviewIndex(next.length - 1);
          return next;
        });
      }
    })
    .catch((e) => {
      setTefQuestioningReviewError(e instanceof Error ? e.message : 'Review failed');
    })
    .finally(() => setTefQuestioningReviewLoading(false));
};
// ---------------------------------------------------------------------------
// TEF Ad review helpers
// ---------------------------------------------------------------------------
/**
 * Starts the first AI review of a finished TEF ad-persuasion session,
 * resetting the carousel before the request and storing the single result
 * (or the failure message).
 */
const startTefAdReview = (snapshot: Message[]) => {
  setTefAdReviews([]);
  setTefAdReviewIndex(0);
  setTefAdReviewError(null);
  setTefAdReviewLoading(true);
  generateTefReview({
    exerciseType: 'persuasion',
    messages: snapshot,
    adSummary: activeScenario?.aiSummary,
    elapsedSeconds: tefElapsed,
  })
    .then((review) => {
      if (!review) return;
      setTefAdReviews([review]);
      setTefAdReviewIndex(0);
    })
    .catch((err) => {
      setTefAdReviewError(err instanceof Error ? err.message : 'Review failed');
    })
    .finally(() => {
      setTefAdReviewLoading(false);
    });
};
/**
 * Generates an additional review for the same ad-persuasion session and
 * appends it to the carousel, jumping the carousel to the newest entry.
 * Unlike startTefAdReview, existing reviews are preserved.
 */
const regenerateTefAdReview = (snapshot: Message[]) => {
  const adSummary = activeScenario?.aiSummary;
  setTefAdReviewError(null);
  setTefAdReviewLoading(true);
  generateTefReview({
    exerciseType: 'persuasion',
    messages: snapshot,
    adSummary,
    elapsedSeconds: tefElapsed,
  })
    .then((r) => {
      if (r) {
        setTefAdReviews((prev) => {
          const next = [...prev, r];
          // NOTE(review): setState inside another updater makes the updater
          // impure; idempotent under StrictMode's double-invoke, but consider
          // moving this index update outside the updater.
          setTefAdReviewIndex(next.length - 1);
          return next;
        });
      }
    })
    .catch((e) => {
      setTefAdReviewError(e instanceof Error ? e.message : 'Review failed');
    })
    .finally(() => setTefAdReviewLoading(false));
};
// TEF Questioning conversation timer (5-minute limit)
/**
 * Fires when the TEF questioning timer expires: cancels any in-flight
 * pipeline request, shows the summary sheet, and starts a review over a
 * snapshot of the transcript. Memoized with empty deps, so it reads all
 * mutable state through refs.
 */
const handleTefQuestioningTimeUp = useCallback(() => {
  // Flag the abort as intentional so processAudioMessage suppresses the
  // ERROR UI instead of surfacing the resulting AbortError.
  processingAbortedRef.current = true;
  const controller = abortControllerRef.current;
  if (controller) {
    controller.abort();
    abortControllerRef.current = null;
  }
  setTefQuestioningTimedUp(true);
  setShowTefQuestioningSummary(true);
  const snapshot = messagesRef.current;
  tefQuestioningMessagesSnapshotRef.current = snapshot;
  startTefQuestioningReview(snapshot);
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const { elapsed: tefQuestioningElapsed } = useConversationTimer(
appState,
tefQuestioningMode === 'practice' && !showTefQuestioningSummary,
handleTefQuestioningTimeUp,
300
);
// Keep ref in sync so the memoized handleTefQuestioningTimeUp always reads the latest value
tefQuestioningElapsedRef.current = tefQuestioningElapsed;
// Audio retry state - stores last recorded audio for retry on failure
const [lastChatAudio, setLastChatAudio] = useState<AudioData | null>(null);
const [lastDescriptionAudio, setLastDescriptionAudio] = useState<AudioData | null>(null);
const [canRetryChatAudio, setCanRetryChatAudio] = useState(false);
const [canRetryDescriptionAudio, setCanRetryDescriptionAudio] = useState(false);
const [retryingMessageTimestamps, setRetryingMessageTimestamps] = useState<Set<number>>(new Set());
// Practice mode sheet state
const [showModeSheet, setShowModeSheet] = useState(false);
// API Key management state
const [showApiKeyModal, setShowApiKeyModal] = useState(false);
const [apiKeyCheckDone, setApiKeyCheckDone] = useState(false);
// Ref to track if we're recording for scenario description
const scenarioRecordingRef = useRef(false);
const scenarioSetupOpenRef = useRef(false);
// Abort + stale guarding for scenario description transcription requests.
const scenarioDescriptionAbortControllerRef = useRef<AbortController | null>(null);
const scenarioDescriptionRequestIdRef = useRef(0);
// Ref to track if processing was aborted by the user
const processingAbortedRef = useRef(false);
/** Set before abort for user cancel (orb) or pipeline deadline; cleared in processAudioMessage finally/catch. */
const pipelineFailureKindRef = useRef<'timeout' | 'user_cancel' | null>(null);
// Request ID counter to detect stale responses from previous requests
const requestIdRef = useRef(0);
// Error flash state
const [errorFlashVisible, setErrorFlashVisible] = useState(false);
const [errorFlashMessage, setErrorFlashMessage] = useState<string>('');
/** Persistent copy for ERROR status line (aligned with flash on pipeline failures). */
const [chatProcessingErrorMessage, setChatProcessingErrorMessage] = useState('');
const hasMessages = messages.length > 0;
const geminiKeyMissing = apiKeyCheckDone && !hasApiKeyOrEnv('gemini');
/**
* Shows an error flash message that auto-dismisses after 3 seconds
*/
const showErrorFlash = useCallback((message?: string) => {
const errorMsg = message || 'An error occurred. Please try again.';
setErrorFlashMessage(errorMsg);
setErrorFlashVisible(true);
setTimeout(() => {
setErrorFlashVisible(false);
setErrorFlashMessage('');
}, 3000);
}, []);
const {
isRecording,
isPlaying,
volume,
startRecording,
stopRecording,
cancelRecording,
getAudioContext,
checkMicrophonePermission,
requestMicrophonePermission
} = useAudio();
// On mount: mark the key check complete (the modal is never auto-shown so
// the user always sees the app first) and, when a Gemini key is available,
// eagerly initialize the session.
useEffect(() => {
  setApiKeyCheckDone(true);
  if (!hasApiKeyOrEnv('gemini')) {
    return;
  }
  initializeSession().catch((error) => {
    console.error('Failed to initialize Gemini session:', error);
  });
}, []);
/**
 * Called after the user saves API keys: re-initializes the Gemini session
 * when a Gemini key is now available, then marks the key check complete.
 */
const handleApiKeySave = async () => {
  if (!hasApiKeyOrEnv('gemini')) {
    setApiKeyCheckDone(true);
    return;
  }
  try {
    await initializeSession();
  } catch (error) {
    console.error('Failed to re-initialize Gemini session:', error);
  }
  setApiKeyCheckDone(true);
};
/**
 * Dismisses the API key modal and records that the user has had the
 * opportunity to enter keys.
 */
const handleApiKeyModalClose = () => {
  setShowApiKeyModal(false);
  setApiKeyCheckDone(true);
};
// Discards an in-progress recording without processing it (Escape key).
// No-op outside the RECORDING state.
const handleCancelRecording = useCallback(() => {
  if (appState !== AppState.RECORDING) {
    return;
  }
  cancelRecording();
  setAppState(AppState.IDLE);
}, [appState, cancelRecording]);
// Let the Escape key cancel an in-progress recording.
useEffect(() => {
  const onKeyDown = (event: KeyboardEvent) => {
    if (event.key !== 'Escape') return;
    if (appState !== AppState.RECORDING) return;
    handleCancelRecording();
  };
  window.addEventListener('keydown', onKeyDown);
  return () => window.removeEventListener('keydown', onKeyDown);
}, [appState, handleCancelRecording]);
// Applies the playback speed chosen in the controls.
const handleSpeedChange = (speed: number) => setPlaybackSpeed(speed);
/**
 * First user gesture handler: touches the AudioContext so the browser
 * resumes it (audio playback requires a user gesture), then marks the app
 * as started.
 */
const handleStartInteraction = async () => {
  getAudioContext();
  setHasStarted(true);
};
/**
 * Ensures microphone permission, prompting the user when needed.
 * @returns true when permission is granted, false otherwise.
 */
const ensureMicrophonePermission = useCallback(async (): Promise<boolean> => {
  switch (await checkMicrophonePermission()) {
    case 'granted':
      return true;
    case 'denied':
      alert('Microphone access has been denied. Please enable microphone access in your browser settings to use voice recording.');
      return false;
    default: {
      // 'prompt' or 'unsupported' — trigger the browser permission prompt.
      const granted = await requestMicrophonePermission();
      if (granted) {
        return true;
      }
      alert('Microphone access is required for voice recording. Please allow microphone access and try again.');
      return false;
    }
  }
}, [checkMicrophonePermission, requestMicrophonePermission]);
/**
 * Begins a chat recording: verifies the Gemini key and microphone
 * permission, clears any pending retry state from a previous failure,
 * then starts capturing audio.
 */
const handleStartRecording = async () => {
  // Recording is pointless without a Gemini key — send the user to setup.
  if (!hasApiKeyOrEnv('gemini')) {
    setShowApiKeyModal(true);
    return;
  }
  if (!hasStarted) {
    await handleStartInteraction();
  }
  if (!(await ensureMicrophonePermission())) {
    return;
  }
  // A fresh take supersedes any previously failed one.
  setCanRetryChatAudio(false);
  setLastChatAudio(null);
  setChatProcessingErrorMessage('');
  setAppState(AppState.RECORDING);
  await startRecording();
};
/**
 * Processes audio data (from recording or retry) and sends it to the AI.
 *
 * Pipeline: wire up abort signals (user abort + hard deadline) → build
 * optional TEF-Ad per-turn phase context → sendVoiceMessage → append the
 * user turn and model reply (single- or multi-character) to the transcript
 * → update per-exercise turn counters.
 *
 * Abort classification relies on pipelineFailureKindRef: 'user_cancel'
 * (orb tap) and 'timeout' (deadline) get dedicated error messages; any
 * other failure shows a generic connection error. A stale response (user
 * aborted, or a newer request superseded this one by requestIdRef) is
 * dropped silently after revoking its blob URLs.
 */
const processAudioMessage = async (audioData: AudioData) => {
  processingAbortedRef.current = false;
  const userAbort = new AbortController();
  abortControllerRef.current = userAbort;
  const deadlineAbort = new AbortController();
  let deadlineTimeoutId: ReturnType<typeof setTimeout> | undefined;
  // Increment request ID so we can detect stale responses
  const currentRequestId = ++requestIdRef.current;
  setAppState(AppState.PROCESSING);
  setChatProcessingErrorMessage('');
  try {
    // Hard deadline: record the failure kind BEFORE aborting so the catch
    // block can distinguish a timeout from a user cancel.
    deadlineTimeoutId = setTimeout(() => {
      pipelineFailureKindRef.current = 'timeout';
      deadlineAbort.abort();
    }, PIPELINE_MAX_MS);
    const pipelineSignal = combineAbortSignals(userAbort.signal, deadlineAbort.signal);
    const { base64, mimeType } = audioData;
    // Build phase-based per-turn context for TEF Ad practice.
    // Skip context injection for the very first message (greeting turn).
    let phaseContextText: string | undefined;
    if (tefAdMode === 'practice' && !tefAdIsFirstMessage) {
      const turnNumber = tefAdTurnCount + 1;
      if (turnNumber <= 2) {
        phaseContextText = '[Per-turn context: Encourage the user to introduce and present the advertisement clearly and in an interesting way.]';
      } else if (turnNumber <= 4) {
        phaseContextText = '[Per-turn context: The user should be developing concrete arguments with examples. If they give a bare assertion without a concrete example, ask "Tu peux me donner un exemple concret?"]';
      } else {
        phaseContextText = '[Per-turn context: Push back with a counter-argument or nuance ("Oui mais...", "Tu ne penses pas que..."). The user should demonstrate they can handle objections and nuance their position.]';
      }
    }
    // Use Gemini for speaking practice
    const response = await sendVoiceMessage(
      base64,
      mimeType,
      pipelineSignal,
      phaseContextText
    );
    // Check if user aborted or a newer request has started (stale response).
    // Revoke any blob URLs the stale response carried so they don't leak.
    if (processingAbortedRef.current || currentRequestId !== requestIdRef.current) {
      if (response.audioUrl) {
        if (Array.isArray(response.audioUrl)) {
          response.audioUrl.forEach(url => {
            URL.revokeObjectURL(url);
          });
        } else {
          URL.revokeObjectURL(response.audioUrl);
        }
      }
      return;
    }
    // Handle multi-character response
    if (Array.isArray(response.audioUrl)) {
      // Validate multi-character response structure
      if (!response.characters || !Array.isArray(response.characters)) {
        console.error('Multi-character response missing characters array');
        const invalidMsg = 'Invalid response format from AI';
        setChatProcessingErrorMessage(invalidMsg);
        setCanRetryChatAudio(true);
        setAppState(AppState.ERROR);
        showErrorFlash(invalidMsg);
        return;
      }
      if (!Array.isArray(response.modelText)) {
        console.error('Multi-character response has invalid modelText');
        const invalidMsg = 'Invalid response format from AI';
        setChatProcessingErrorMessage(invalidMsg);
        setCanRetryChatAudio(true);
        setAppState(AppState.ERROR);
        showErrorFlash(invalidMsg);
        return;
      }
      // Verify all arrays have matching lengths
      const audioUrls = response.audioUrl;
      const characters = response.characters;
      const modelTexts = response.modelText;
      if (audioUrls.length !== characters.length || audioUrls.length !== modelTexts.length) {
        console.error('Multi-character response arrays have mismatched lengths', {
          audioUrls: audioUrls.length,
          characters: characters.length,
          modelTexts: modelTexts.length
        });
        const invalidMsg = 'Invalid response format from AI';
        setChatProcessingErrorMessage(invalidMsg);
        setCanRetryChatAudio(true);
        setAppState(AppState.ERROR);
        showErrorFlash(invalidMsg);
        return;
      }
      const timestamp = Date.now();
      // Create a blob URL for the user's recorded audio so the review service
      // can evaluate actual speech rather than relying solely on transcripts.
      const userAudioBlob = new Blob(
        [Uint8Array.from(atob(base64), c => c.charCodeAt(0))],
        { type: mimeType }
      );
      const userAudioUrl = URL.createObjectURL(userAudioBlob);
      const userMessage: Message = { role: 'user', text: response.userText, timestamp, audioUrl: userAudioUrl };
      // Create separate messages for each character; timestamps are offset
      // by index so every message gets a unique, ordered key.
      const modelMessages: Message[] = characters.map((char, idx) => ({
        role: 'model' as const,
        text: modelTexts[idx],
        timestamp: timestamp + idx + 1,
        audioUrl: audioUrls[idx],
        characterId: char.characterId,
        characterName: char.characterName,
        voiceName: char.voiceName,
        // Only the last character's message carries the hint.
        hint: idx === characters.length - 1 ? response.hint : undefined,
        audioGenerationFailed: char.audioGenerationFailed || false,
        frenchText: char.frenchText // Store French text for TTS retry
      }));
      setMessages(prev => [...prev, userMessage, ...modelMessages]);
      // Update current hint (only in scenario mode, from last character)
      if ((scenarioMode === 'practice' || tefAdMode === 'practice') && response.hint) {
        setCurrentHint(response.hint);
      }
      // Set the first character message to auto-play (others will auto-play sequentially)
      setAutoPlayMessageId(timestamp + 1);
    } else {
      // Single-character response (original behavior)
      const { audioUrl, userText, modelText, hint, voiceName, audioGenerationFailed, characters } = response;
      // Add messages to history (append for chronological order - newest last)
      const timestamp = Date.now();
      const modelTimestamp = timestamp + 1;
      // Create a blob URL for the user's recorded audio so the review service
      // can evaluate actual speech rather than relying solely on transcripts.
      const userAudioBlob = new Blob(
        [Uint8Array.from(atob(base64), c => c.charCodeAt(0))],
        { type: mimeType }
      );
      const userAudioUrl = URL.createObjectURL(userAudioBlob);
      setMessages(prev => [
        ...prev,
        {
          role: 'user',
          text: userText,
          timestamp,
          audioUrl: userAudioUrl,
          // Repeat/concept metadata only applies to TEF questioning turns
          // after the greeting.
          ...(tefQuestioningMode === 'practice' && !tefQuestioningIsFirstMessage && {
            isRepeat: response.isRepeat,
            conceptLabels: response.conceptLabels,
          }),
        },
        {
          role: 'model',
          text: modelText as string,
          timestamp: modelTimestamp,
          audioUrl: audioUrl as string,
          hint,
          voiceName,
          audioGenerationFailed,
          frenchText: characters?.[0]?.frenchText // Store French text for TTS retry
        },
      ]);
      // Update current hint (only in scenario/ad/questioning practice mode)
      if ((scenarioMode === 'practice' || tefAdMode === 'practice') && hint) {
        setCurrentHint(hint);
      }
      if (tefQuestioningMode === 'practice' && !tefQuestioningIsFirstMessage && hint) {
        setCurrentHint(hint);
      }
      // Set the new model message to auto-play
      setAutoPlayMessageId(modelTimestamp);
    }
    // Success - clear retry state
    setCanRetryChatAudio(false);
    setLastChatAudio(null);
    setChatProcessingErrorMessage('');
    setAppState(AppState.IDLE);
    // Persuasion first-message handling
    if (tefAdMode === 'practice') {
      if (tefAdIsFirstMessage) {
        // First turn is a greeting — skip turn count increment, just mark first message done
        setTefAdIsFirstMessage(false);
      } else {
        // Increment turn count after each non-first successful user turn
        setTefAdTurnCount(prev => prev + 1);
      }
    }
    // TEF Questioning: count questions (skip first message / greeting)
    if (tefQuestioningMode === 'practice') {
      if (tefQuestioningIsFirstMessage) {
        setTefQuestioningIsFirstMessage(false);
      } else {
        setTefQuestioningQuestionCount(c => c + 1);
        if (response.isRepeat === true) {
          setTefQuestioningRepeatCount(r => r + 1);
        }
      }
    }
  } catch (error) {
    // Capture and clear the failure kind before any early return so a later
    // request never inherits this one's classification.
    const kind = pipelineFailureKindRef.current;
    pipelineFailureKindRef.current = null;
    // If aborted or superseded by a newer request, don't show error
    if (processingAbortedRef.current || currentRequestId !== requestIdRef.current) {
      return;
    }
    const isAbort = isAbortLikeError(error);
    const defaultMsg = 'Connection error. Please try again.';
    if (isAbort && kind === 'user_cancel') {
      const msg = 'You canceled the processing.';
      setChatProcessingErrorMessage(msg);
      setCanRetryChatAudio(true);
      setAppState(AppState.ERROR);
      showErrorFlash(msg);
      return;
    }
    if (isAbort && kind === 'timeout') {
      const msg = 'Connection timed out';
      setChatProcessingErrorMessage(msg);
      setCanRetryChatAudio(true);
      setAppState(AppState.ERROR);
      showErrorFlash(msg);
      return;
    }
    console.error("Interaction failed", error);
    setChatProcessingErrorMessage(defaultMsg);
    setCanRetryChatAudio(true);
    setAppState(AppState.ERROR);
    showErrorFlash(defaultMsg);
  } finally {
    // Always disarm the deadline timer and release the abort controller.
    if (deadlineTimeoutId !== undefined) {
      clearTimeout(deadlineTimeoutId);
    }
    abortControllerRef.current = null;
    pipelineFailureKindRef.current = null;
  }
};
/**
 * Finalizes the current recording, stashes the audio for potential retry,
 * and feeds it through the processing pipeline.
 */
const handleStopRecording = async () => {
  processingAbortedRef.current = false;
  setAppState(AppState.PROCESSING);
  try {
    const recorded = await stopRecording();
    // The user may have aborted while the recorder was flushing.
    if (processingAbortedRef.current) {
      return;
    }
    // Keep a copy so a failed request can be retried without re-recording.
    const audioData: AudioData = { base64: recorded.base64, mimeType: recorded.mimeType };
    setLastChatAudio(audioData);
    await processAudioMessage(audioData);
  } catch (error) {
    if (processingAbortedRef.current) {
      // Intentional abort — stay quiet.
      return;
    }
    console.error("Interaction failed", error);
    const defaultMsg = 'Connection error. Please try again.';
    setChatProcessingErrorMessage(defaultMsg);
    setAppState(AppState.ERROR);
    showErrorFlash(defaultMsg);
  }
};
/** Re-sends the most recent recorded chat audio after a failure. */
const handleRetryChatAudio = async () => {
  if (lastChatAudio) {
    await processAudioMessage(lastChatAudio);
  } else {
    console.error("No audio to retry");
  }
};
/**
 * Regenerates TTS audio for a single model message whose speech synthesis
 * failed. Tracks the in-flight retry per message timestamp so the UI can
 * show progress on just that message.
 */
const handleRetryAudioGeneration = async (messageTimestamp: number) => {
  const message = messages.find((m) => m.timestamp === messageTimestamp);
  // Only messages flagged as failed AND carrying a voice can be retried.
  if (!message || !message.audioGenerationFailed || !message.voiceName) {
    console.error("Cannot retry audio generation for this message", {
      hasMessage: !!message,
      audioGenerationFailed: message?.audioGenerationFailed,
      voiceName: message?.voiceName
    });
    return;
  }
  setRetryingMessageTimestamps((prev) => new Set(prev).add(messageTimestamp));
  try {
    // Prefer the French-only text for TTS; fall back to the full message text.
    const textForTTS = message.frenchText || message.text;
    const audioUrl = await generateCharacterSpeech(textForTTS, message.voiceName);
    // Swap in the fresh audio and clear the failure flag on that message.
    setMessages((prev) =>
      prev.map((m) =>
        m.timestamp === messageTimestamp
          ? { ...m, audioUrl, audioGenerationFailed: false }
          : m
      )
    );
  } catch (err) {
    console.error('Audio generation retry failed:', err);
    // Could show a toast notification here
  } finally {
    setRetryingMessageTimestamps((prev) => {
      const next = new Set(prev);
      next.delete(messageTimestamp);
      return next;
    });
  }
};
// Orb tap during PROCESSING: cancel the in-flight pipeline request.
// ERROR + retry UI are handled by processAudioMessage's catch; do NOT set
// processingAbortedRef here (that flag silently suppresses UI and is
// reserved for exercise-exit).
const handleAbortProcessing = useCallback(() => {
  const controller = abortControllerRef.current;
  if (controller === null) {
    return;
  }
  pipelineFailureKindRef.current = 'user_cancel';
  controller.abort();
}, []);
// Orb click: stop while recording, abort while processing, otherwise start.
const handleOrbClick = () => {
  switch (appState) {
    case AppState.RECORDING:
      handleStopRecording();
      break;
    case AppState.PROCESSING:
      handleAbortProcessing();
      break;
    default:
      handleStartRecording();
  }
};
/**
 * Clears the conversation: revokes all message audio blob URLs, empties
 * the shared history and the UI transcript, and resets the Gemini session
 * (preserving the active scenario, if any). The UI cleanup previously
 * appeared in both the try and catch paths; it is deduplicated into a
 * finally block so the transcript is always cleared exactly once.
 */
const handleClearHistory = async () => {
  // Revoke every blob URL referenced by the transcript before dropping it;
  // otherwise the object URLs leak for the lifetime of the page.
  messages.forEach(msg => {
    if (!msg.audioUrl) return;
    const urls = Array.isArray(msg.audioUrl) ? msg.audioUrl : [msg.audioUrl];
    urls.forEach(url => URL.revokeObjectURL(url));
  });
  try {
    // Clear shared conversation history, then reset the Gemini session,
    // preserving the scenario if one is active.
    clearHistory();
    resetSession(activeScenario);
  } catch (error) {
    console.error("Error clearing history:", error);
    showErrorFlash();
  } finally {
    // Always clear the UI transcript and hint, even if resetSession failed.
    setMessages([]);
    setAutoPlayMessageId(null);
    setCurrentHint(null);
  }
};
// Scenario mode handlers
const abortScenarioDescriptionTranscription = useCallback(() => {
  const controller = scenarioDescriptionAbortControllerRef.current;
  if (!controller) {
    return;
  }
  controller.abort();
  scenarioDescriptionAbortControllerRef.current = null;
}, []);
const handleOpenScenarioSetup = () => {
  // Mirror modal visibility in a ref so async callbacks can check it
  // without stale-closure issues.
  scenarioSetupOpenRef.current = true;
  setScenarioMode('setup');
  // Start from a blank slate: no name, description, or AI summary.
  setScenarioName('');
  setScenarioDescription('');
  setAiSummary(null);
  // Reset transcript UI left over from any previous setup session.
  setRawTranscript(null);
  setCleanedTranscript(null);
  setShowTranscriptOptions(false);
};
const handleCloseScenarioSetup = () => {
  // Invalidate any in-flight transcription so late-resolving promises
  // cannot overwrite transcript state after close+reopen.
  scenarioDescriptionRequestIdRef.current += 1;
  abortScenarioDescriptionTranscription();
  scenarioSetupOpenRef.current = false;
  setScenarioMode('none');
  // Wipe every piece of setup state so a re-open starts clean.
  setScenarioName('');
  setScenarioDescription('');
  setAiSummary(null);
  setScenarioCharacters([]); // Clear characters
  setIsRecordingDescription(false);
  setIsTranscribingDescription(false);
  setRawTranscript(null);
  setCleanedTranscript(null);
  setShowTranscriptOptions(false);
  // Clear description retry state
  setCanRetryDescriptionAudio(false);
  setLastDescriptionAudio(null);
  // Stop the microphone if the user closed the modal mid-recording.
  if (scenarioRecordingRef.current) {
    scenarioRecordingRef.current = false;
    cancelRecording();
  }
};
const handleStartRecordingDescription = async () => {
  // Scenario creation requires both Gemini (transcription) and OpenAI (planning)
  const hasRequiredKeys = hasApiKeyOrEnv('gemini') && hasApiKeyOrEnv('openai');
  if (!hasRequiredKeys) {
    setShowApiKeyModal(true);
    return;
  }
  try {
    getAudioContext();
    // Check and request microphone permission before starting
    if (!(await ensureMicrophonePermission())) {
      return;
    }
    // Clear retry state when starting a new recording
    setLastDescriptionAudio(null);
    setCanRetryDescriptionAudio(false);
    scenarioRecordingRef.current = true;
    setIsRecordingDescription(true);
    await startRecording();
  } catch (error) {
    console.error('Error starting recording description:', error);
    // Roll back recording flags, then surface the failure to the caller.
    scenarioRecordingRef.current = false;
    setIsRecordingDescription(false);
    throw error;
  }
};
/**
 * Process description audio (from recording or retry) and transcribe it.
 *
 * Concurrency-safe: each call claims a monotonically increasing request ID
 * and aborts any earlier in-flight transcription, so only the latest attempt
 * may touch transcript state (guards against quick-retry and close+reopen
 * races).
 *
 * @param audioData Base64-encoded audio plus its MIME type.
 */
const processDescriptionAudio = async (audioData: AudioData): Promise<void> => {
  // Increment request ID so we can ignore stale results even if the modal
  // is closed and then re-opened while a previous transcription is still in-flight.
  const currentRequestId = ++scenarioDescriptionRequestIdRef.current;
  // Abort any previous transcription attempt (e.g., quick retry or close+reopen races).
  abortScenarioDescriptionTranscription();
  const abortController = new AbortController();
  scenarioDescriptionAbortControllerRef.current = abortController;
  setIsTranscribingDescription(true);
  try {
    const { base64, mimeType } = audioData;
    // Single LLM call to transcribe and clean up the audio - using Gemini
    const { rawTranscript: rawText, cleanedTranscript: cleanedText } = await transcribeAndCleanupAudio(
      base64,
      mimeType,
      abortController.signal
    );
    // Discard results if a newer request started or the modal has been closed.
    if (currentRequestId !== scenarioDescriptionRequestIdRef.current || !scenarioSetupOpenRef.current) {
      return;
    }
    setRawTranscript(rawText);
    setCleanedTranscript(cleanedText);
    if (!rawText.trim() || !cleanedText.trim()) {
      showErrorFlash('Transcription was empty. Please try again.');
      // Keep retry available for empty transcription
      setCanRetryDescriptionAudio(true);
      return;
    }
    // Success - clear retry state
    setCanRetryDescriptionAudio(false);
    setLastDescriptionAudio(null);
    setShowTranscriptOptions(true);
  } catch (error) {
    // If this was an intentional cancellation, discard silently.
    // DOMException extends Error, so one instanceof check covers both
    // fetch-style AbortErrors and Error subclasses — no `as any` needed.
    const errName = error instanceof Error ? error.name : undefined;
    if (errName === 'AbortError') {
      return;
    }
    // Stale request or modal already closed: nothing left to update.
    if (currentRequestId !== scenarioDescriptionRequestIdRef.current || !scenarioSetupOpenRef.current) {
      return;
    }
    console.error('Error transcribing description:', error);
    if (scenarioSetupOpenRef.current) {
      // Enable retry with the stored audio
      setCanRetryDescriptionAudio(true);
      showErrorFlash('Failed to transcribe audio. Please try again.');
    }
  } finally {
    // Only clear the spinner for the latest attempt; stale requests must not
    // affect transcript UI state after a close+reopen race.
    if (currentRequestId === scenarioDescriptionRequestIdRef.current) {
      setIsTranscribingDescription(false);
    }
    // Release the controller only if a newer attempt hasn't replaced it.
    if (scenarioDescriptionAbortControllerRef.current === abortController) {
      scenarioDescriptionAbortControllerRef.current = null;
    }
  }
};
const handleStopRecordingDescription = async (): Promise<void> => {
  scenarioRecordingRef.current = false;
  setIsRecordingDescription(false);
  try {
    const { base64, mimeType } = await stopRecording();
    const audioData: AudioData = { base64, mimeType };
    // Keep the raw audio around so a failed transcription can be retried,
    // then hand it off for transcription.
    setLastDescriptionAudio(audioData);
    await processDescriptionAudio(audioData);
  } catch (error) {
    console.error('Error stopping recording description:', error);
    if (scenarioSetupOpenRef.current) {
      showErrorFlash('Failed to process recording. Please try again.');
    }
  }
};
/**
* Retry transcribing the last recorded description audio
*/
const handleRetryDescriptionAudio = async (): Promise<void> => {
if (!lastDescriptionAudio) {
console.error("No audio to retry");
return;