remove torch pin
Browse files- handler.py +9 -8
- requirements.txt +1 -1
handler.py
CHANGED
|
@@ -137,14 +137,15 @@ class EndpointHandler():
         all_pixel_values = all_pixel_values.to(self.model.device)
         all_grid_thws = all_grid_thws.to(self.model.device)

-
-
-
-
-
-
-
-
+        with torch.no_grad():
+            generated_ids = self.model.generate(
+                input_ids,
+                pixel_values=all_pixel_values,
+                grid_thws=all_grid_thws,
+                max_new_tokens=max_new_tokens,
+                temperature=temperature,
+                **parameters
+            )
         else:
             # Text-only generation
             generated_ids = self.model.generate(

(Note: the eight removed lines above had no content captured in this rendering; indentation of the added lines is inferred from context.)
|
requirements.txt
CHANGED
|
@@ -1,5 +1,5 @@
 transformers==4.53.0
-torch==2.8.0
+# torch==2.8.0
 pillow==11.3.0
 tiktoken==0.11.0
 blobfile==3.0.0