-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest-modules.js
More file actions
156 lines (123 loc) · 5.33 KB
/
test-modules.js
File metadata and controls
156 lines (123 loc) · 5.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
/**
* TEST-MODULES.JS - Test Response and Audio Modules
* Simple testing script to validate module functionality
*/
// ============================================================================
// TEST CONFIGURATION
// ============================================================================
// Voice settings and orb functions are now handled by audio.js and orb.js modules
// ============================================================================
// TEST FUNCTIONS
// ============================================================================
/**
* Test AI response module
*/
/**
 * Test the AI response module.
 *
 * Exercises `AI.getAIResponse` with a basic and a complex prompt, then
 * checks that the basic response is formatted as bracketed bullet points.
 * Assumes the global `AI` module (airesponse.js) is loaded — TODO confirm.
 *
 * @returns {Promise<void>} Resolves after all sub-tests run; failures are
 *   caught and logged rather than rethrown.
 */
async function testAIResponseModule() {
  console.log('\n=== TESTING AI RESPONSE MODULE ===');
  try {
    // Test basic AI response
    console.log('\n--- Testing Basic AI Response ---');
    const userInput = 'hello there';
    console.log('User Input:', userInput);
    const response = await AI.getAIResponse(userInput);
    console.log('AI Response:', response);

    // Test complex AI response
    console.log('\n--- Testing Complex AI Response ---');
    const complexInput = 'explain how machine learning works';
    console.log('User Input:', complexInput);
    const complexResponse = await AI.getAIResponse(complexInput);
    console.log('AI Response:', complexResponse);

    // Test AI response format validation
    console.log('\n--- Testing Response Format ---');
    const lines = response.split('\n').filter(line => line.trim().startsWith('-'));
    console.log('Bullet points found:', lines.length);
    // FIX: `[].every(...)` is vacuously true, so a response with zero bullet
    // points used to "pass" validation. Require at least one bullet point.
    const formatOk =
      lines.length > 0 &&
      lines.every(line => line.includes('[') && line.includes(']'));
    console.log('Format validation:', formatOk);

    console.log('\n✅ AI RESPONSE MODULE TESTS COMPLETED');
  } catch (error) {
    console.error('❌ AI RESPONSE MODULE TEST FAILED:', error);
  }
}
/**
* Test audio module
*/
/**
 * Test the audio module.
 *
 * Generates speech for a single utterance and for a batch of sentences via
 * the global `AudioModule` (audio.js), logging buffer sizes and counts.
 * Presumably requires the audio backend to be reachable — verify in context.
 *
 * @returns {Promise<void>} Resolves after both sub-tests run; failures are
 *   caught and logged rather than rethrown.
 */
async function testAudioModule() {
  console.log('\n=== TESTING AUDIO MODULE ===');

  // Shared voice configuration for both sub-tests.
  const agent = 'liam';
  const voiceId = AudioModule.voices.liam;

  try {
    // Single-utterance generation.
    console.log('\n--- Testing Single Audio Generation ---');
    const sampleText = 'Hello there!';
    console.log('Generating audio for:', sampleText);
    const buffer = await AudioModule.generateSpeech(sampleText, voiceId, agent);
    console.log('Audio buffer generated, size:', buffer.byteLength, 'bytes');

    // Batch generation over several sentences.
    console.log('\n--- Testing Multiple Audio Generation ---');
    const sampleSentences = ['Hello there!', 'How are you doing?'];
    console.log('Generating audio for sentences:', sampleSentences);
    const buffers = await AudioModule.generateMultipleAudio(
      sampleSentences,
      voiceId,
      agent,
    );
    console.log('Generated', buffers.length, 'audio buffers');

    console.log('\n✅ AUDIO MODULE TESTS COMPLETED');
  } catch (error) {
    console.error('❌ AUDIO MODULE TEST FAILED:', error);
  }
}
/**
* Test complete workflow with new modules
*/
/**
 * Test the complete workflow: AI response -> sentence extraction -> audio.
 *
 * Fetches an AI response via the global `AI` module, splits it into bullet
 * sentences, and streams audio for them via the global `AudioModule`.
 *
 * @returns {Promise<void>} Resolves after the workflow runs; failures are
 *   caught and logged rather than rethrown.
 */
async function testCompleteWorkflow() {
  console.log('\n=== TESTING COMPLETE WORKFLOW (NEW MODULES) ===');
  try {
    const userInput = 'hey there';
    console.log('User Input:', userInput);

    // Step 1: Get AI response using airesponse.js
    console.log('\n--- Step 1: Getting AI Response ---');
    const aiResponse = await AI.getAIResponse(userInput);
    console.log('AI Response:', aiResponse);

    // Step 2: Process for audio (split bullet points)
    console.log('\n--- Step 2: Processing for Audio ---');
    // FIX: the old `.substring(2)` assumed an exact "- " prefix, which
    // dropped the first character of bullets written as "-text" and kept
    // extra whitespace for "-  text". Strip the dash and any following
    // whitespace explicitly instead.
    const sentences = aiResponse
      .split('\n')
      .map(line => line.trim())
      .filter(line => line.startsWith('-'))
      .map(line => line.replace(/^-\s*/, ''))
      .filter(s => s.length > 0);
    console.log('Sentences for audio:', sentences);

    // Step 3: Generate audio using audio.js
    console.log('\n--- Step 3: Generating Audio ---');
    const voiceId = AudioModule.voices.liam;
    const agent = 'liam';
    await AudioModule.streamingAudioPlayback(sentences, voiceId, agent);
    console.log('Audio generation and playback completed');

    console.log('\n✅ COMPLETE WORKFLOW TEST COMPLETED');
  } catch (error) {
    console.error('❌ COMPLETE WORKFLOW TEST FAILED:', error);
  }
}
// ============================================================================
// RUN TESTS
// ============================================================================
/**
* Run all tests
*/
/**
 * Run every module test suite in sequence.
 *
 * Sub-suites catch their own failures internally, so the outer try/catch
 * here only fires if a suite throws unexpectedly (e.g. a missing global).
 *
 * @returns {Promise<void>} Resolves when all suites have finished.
 */
async function runAllTests() {
  console.log('🚀 STARTING MODULE TESTS (UPDATED)');
  console.log('==================================');
  try {
    // Run sequentially: audio tests share playback state, so no Promise.all.
    const suites = [testAIResponseModule, testAudioModule, testCompleteWorkflow];
    for (const suite of suites) {
      await suite();
    }
    console.log('\n🎉 ALL TESTS COMPLETED SUCCESSFULLY');
  } catch (error) {
    console.error('\n💥 TESTS FAILED:', error);
  }
}
// Auto-run tests if this script is executed directly.
// FIX: both branches of the old if/else invoked runAllTests() identically —
// only the log message differed — so the duplicated call is collapsed.
const runtimeEnvironment = typeof window !== 'undefined' ? 'browser' : 'Node.js';
console.log(`Running tests in ${runtimeEnvironment}...`);
// Fire-and-forget is safe: runAllTests catches and logs its own failures.
void runAllTests();