JumpStart automated update

Report key [2024-03-25-07-11-09/deploy-reports/2024-03-25-07-1/processed-report.json]

modal.json (+7 -7)

@@ -5,7 +5,7 @@
     "id": "google/gemma-7b",
     "instanceType": "ml.p5.48xlarge",
     "numGpu": 8,
-    "containerStartupHealthCheckTimeout":
+    "containerStartupHealthCheckTimeout": 1800
   },
   {
     "id": "google/gemma-7b-it",
@@ -2068,9 +2068,9 @@
   },
   {
     "id": "codellama/CodeLlama-7b-hf",
-    "instanceType": "ml.
+    "instanceType": "ml.p4d.24xlarge",
     "numGpu": 8,
-    "containerStartupHealthCheckTimeout":
+    "containerStartupHealthCheckTimeout": 1500
   },
   {
     "id": "tiiuae/falcon-7b",
@@ -2098,9 +2098,9 @@
   },
   {
     "id": "codellama/CodeLlama-70b-hf",
-    "instanceType": "ml.
+    "instanceType": "ml.p5.48xlarge",
     "numGpu": 8,
-    "containerStartupHealthCheckTimeout":
+    "containerStartupHealthCheckTimeout": 1800
   },
   {
     "id": "meta-llama/Llama-2-7b-chat-hf",
@@ -2122,9 +2122,9 @@
   },
   {
     "id": "codellama/CodeLlama-7b-Python-hf",
-    "instanceType": "ml.
+    "instanceType": "ml.p4d.24xlarge",
     "numGpu": 8,
-    "containerStartupHealthCheckTimeout":
+    "containerStartupHealthCheckTimeout": 1500
   },
   {
     "id": "google/gemma-2b",
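
Each entry in modal.json pairs a model id with the instance type, GPU count, and container startup health-check timeout to use when that model is hosted. As a rough sketch only, not something defined in this PR, the snippet below shows one plausible way the updated "codellama/CodeLlama-70b-hf" entry could drive a SageMaker deployment with the Hugging Face TGI container; the execution role ARN, the TGI image version, and the field-to-argument mapping are all assumptions for illustration.

# Hedged sketch, not part of this change: map one modal.json entry onto a
# SageMaker Hugging Face TGI deployment. Role ARN and TGI version are placeholders.
from sagemaker.huggingface import HuggingFaceModel, get_huggingface_llm_image_uri

# Values taken from the updated "codellama/CodeLlama-70b-hf" entry above.
entry = {
    "id": "codellama/CodeLlama-70b-hf",
    "instanceType": "ml.p5.48xlarge",
    "numGpu": 8,
    "containerStartupHealthCheckTimeout": 1800,
}

model = HuggingFaceModel(
    role="arn:aws:iam::123456789012:role/SageMakerExecutionRole",  # placeholder role
    image_uri=get_huggingface_llm_image_uri("huggingface", version="1.4.2"),  # example TGI version
    env={
        "HF_MODEL_ID": entry["id"],           # Hugging Face Hub model id to serve
        "SM_NUM_GPUS": str(entry["numGpu"]),  # shard the model across all GPUs
    },
)

predictor = model.deploy(
    initial_instance_count=1,
    instance_type=entry["instanceType"],
    # Seconds SageMaker waits for the container to pass its startup health check;
    # the value comes straight from containerStartupHealthCheckTimeout.
    container_startup_health_check_timeout=entry["containerStartupHealthCheckTimeout"],
)

Raising containerStartupHealthCheckTimeout gives large checkpoints more time to download and load before SageMaker marks the endpoint unhealthy, which is why the bigger models in this update carry the larger 1800-second budget.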