marcosremar2 commited on
Commit
005c341
·
verified ·
1 Parent(s): 460c597

Upload test_vllm_endpoint.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. test_vllm_endpoint.py +165 -0
test_vllm_endpoint.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env python3
"""
Test RunPod vLLM endpoint with Ultravox.

Sends two requests to a RunPod serverless vLLM endpoint:
  1. A cold-start request (expected to take several seconds while a
     worker spins up).
  2. A warm request shortly afterwards (expected to return quickly).

Required environment variables:
    RUNPOD_API_KEY      -- RunPod API key (never hard-code secrets).
    RUNPOD_ENDPOINT_ID  -- ID of the vLLM endpoint to test.
"""

import os
import sys
import time

import runpod

# Configuration: both credentials come from the environment so that no
# secret is ever committed to source control.
API_KEY = os.getenv("RUNPOD_API_KEY", "")
ENDPOINT_ID = os.getenv("RUNPOD_ENDPOINT_ID", "")

if not API_KEY or not ENDPOINT_ID:
    print("=" * 60)
    print("❌ Error: RUNPOD_API_KEY and/or RUNPOD_ENDPOINT_ID not set")
    print("=" * 60)
    print()
    print("Usage:")
    print("  export RUNPOD_API_KEY='your-api-key'")
    print("  export RUNPOD_ENDPOINT_ID='your-endpoint-id'")
    print("  python3 test_vllm_endpoint.py")
    print()
    sys.exit(1)


def _extract_response(result):
    """Best-effort extraction of the generated text from a RunPod result.

    RunPod handlers are not consistent about the response envelope, so
    accept either {'output': ...} or {'text': ...} and fall back to the
    string form of the raw result for anything else.
    """
    if isinstance(result, dict):
        if "output" in result:
            return result["output"]
        if "text" in result:
            return result["text"]
    return str(result)


print("=" * 60)
print("Testing RunPod vLLM Endpoint (Ultravox)")
print("=" * 60)
print()
print(f"Endpoint ID: {ENDPOINT_ID}")
print()

# Initialize endpoint client.
runpod.api_key = API_KEY
endpoint = runpod.Endpoint(ENDPOINT_ID)

# Test 1: First request (cold start).
print("🧪 Test 1: Cold Start Performance")
print("-" * 60)
print("Sending first request (expect 8-15s)...")
print()

start_time = time.time()

try:
    result = endpoint.run_sync({
        "input": {
            "prompt": "Explain what Ultravox is in one sentence.",
            "max_tokens": 100,
            "temperature": 0.7,
        }
    }, timeout=120)

    elapsed = time.time() - start_time

    print(f"✅ Success! Total time: {elapsed:.2f}s")
    print()

    print("Response:")
    print(f"  {_extract_response(result)}")
    print()

    # Analyze cold start: >10s strongly suggests a worker had to boot.
    if elapsed > 10:
        print("⏱️ Cold Start Confirmed!")
        print(f"  First request: {elapsed:.2f}s")
        print("  Next request will be much faster")
    else:
        print("⚡ Fast response! Instance was already warm.")

    print()

except Exception as e:
    # A failed first request means the endpoint itself is unusable, so
    # abort instead of running the warm-request test.
    print(f"❌ Error: {e}")
    print()
    print("Troubleshooting:")
    print("  1. Check endpoint is running in RunPod console")
    print("  2. Verify endpoint ID is correct")
    print("  3. Check environment variables in endpoint config")
    print("  4. Look at logs in RunPod console")
    sys.exit(1)

# Brief pause so the second request lands while the worker is still warm.
print("⏳ Waiting 2 seconds...")
time.sleep(2)
print()

# Test 2: Warm request.
print("🚀 Test 2: Warm Request Performance")
print("-" * 60)
print("Sending second request (expect <1s)...")
print()

start_time = time.time()

try:
    result = endpoint.run_sync({
        "input": {
            "prompt": "What is 2+2?",
            "max_tokens": 20,
        }
    }, timeout=30)

    elapsed = time.time() - start_time

    print(f"✅ Success! Total time: {elapsed:.2f}s")
    print()

    print("Response:")
    print(f"  {_extract_response(result)}")
    print()

    # Analyze performance: a warm worker should answer well under 2s.
    if elapsed < 2:
        print("🎉 Excellent! Instance is WARM and fast!")
        print(f"  Warm inference: {elapsed:.2f}s")
    else:
        print("⚠️ Slower than expected")
        print(f"  Inference time: {elapsed:.2f}s")
        print("  Instance may have scaled down already")

except Exception as e:
    # Warm-request failure is reported but non-fatal: the cold-start
    # test already proved the endpoint works.
    print(f"❌ Error: {e}")

print()
print("=" * 60)
print("✅ Testing Complete")
print("=" * 60)
print()
print("Summary:")
print("  ✅ vLLM endpoint is working")
print("  ✅ Ultravox model loaded successfully")
print("  ✅ Ready for production use")
print()
print("Next Steps:")
print("  1. Update /workspace/ultravox-pipeline/config/runpod.yaml")
print("  2. Add endpoint_id to 'ultravox' section")
print("  3. Start the runpod_llm service")
print("  4. Test via service API")
print()
print("Note: vLLM supports TEXT input only")
print("For AUDIO input, build custom Docker image")
print()