@@ -7,7 +7,6 @@
 from oci.data_science.models import ContainerSummary
 from pydantic import Field
 
-from ads.aqua.common.entities import ContainerSpec
 from ads.aqua.config.utils.serializer import Serializable
 from ads.aqua.constants import (
     SERVICE_MANAGED_CONTAINER_URI_SCHEME,
@@ -185,24 +184,31 @@ def from_service_config(
                             0
                         ].additional_configurations.get(
                             "MODEL_DEPLOY_PREDICT_ENDPOINT", UNKNOWN
-                        ),
+                        )
+                    },
+                    {
                         "MODEL_DEPLOY_HEALTH_ENDPOINT": container.workload_configuration_details_list[
                             0
                         ].additional_configurations.get(
                             "MODEL_DEPLOY_HEALTH_ENDPOINT", UNKNOWN
-                        ),
+                        )
+                    },
+                    {
                         "MODEL_DEPLOY_ENABLE_STREAMING": container.workload_configuration_details_list[
                             0
                         ].additional_configurations.get(
                             "MODEL_DEPLOY_ENABLE_STREAMING", UNKNOWN
-                        ),
+                        )
+                    },
+                    {
                         "PORT": container.workload_configuration_details_list[
                             0
                         ].additional_configurations.get("PORT", ""),
                         "HEALTH_CHECK_PORT": container.workload_configuration_details_list[
                             0
                         ].additional_configurations.get("HEALTH_CHECK_PORT", UNKNOWN),
-                    }
+                    },
+                    {},
                 ]
                 container_spec = AquaContainerConfigSpec(
                     cli_param=container.workload_configuration_details_list[0].cmd,
@@ -239,88 +245,3 @@ def from_service_config(
         return cls(
             inference=inference_items, finetune=finetune_items, evaluate=evaluate_items
         )
-
-    @classmethod
-    def from_container_index_json(
-        cls,
-        config: Dict,
-        enable_spec: Optional[bool] = False,
-    ) -> "AquaContainerConfig":
-        """
-        Creates an AquaContainerConfig instance from a container index JSON.
-
-        Parameters
-        ----------
-        config (Optional[Dict]): The container index JSON.
-        enable_spec (Optional[bool]): If True, fetch container specification details.
-
-        Returns
-        -------
-        AquaContainerConfig: The constructed container configuration.
-        """
-        # TODO: Return this logic back if necessary in the next iteraion.
-        # if not config:
-        #     config = get_container_config()
-        inference_items: Dict[str, AquaContainerConfigItem] = {}
-        finetune_items: Dict[str, AquaContainerConfigItem] = {}
-        evaluate_items: Dict[str, AquaContainerConfigItem] = {}
-
-        for container_type, containers in config.items():
-            if isinstance(containers, list):
-                for container in containers:
-                    platforms = container.get("platforms", [])
-                    model_formats = container.get("modelFormats", [])
-                    usages = container.get("usages", [])
-                    container_spec = (
-                        config.get(ContainerSpec.CONTAINER_SPEC, {}).get(
-                            container_type, {}
-                        )
-                        if enable_spec
-                        else None
-                    )
-                    container_item = AquaContainerConfigItem(
-                        name=container.get("name", ""),
-                        version=container.get("version", ""),
-                        display_name=container.get(
-                            "displayName", container.get("version", "")
-                        ),
-                        family=container_type,
-                        platforms=platforms,
-                        model_formats=model_formats,
-                        usages=usages,
-                        spec=(
-                            AquaContainerConfigSpec(
-                                cli_param=container_spec.get(
-                                    ContainerSpec.CLI_PARM, ""
-                                ),
-                                server_port=container_spec.get(
-                                    ContainerSpec.SERVER_PORT, ""
-                                ),
-                                health_check_port=container_spec.get(
-                                    ContainerSpec.HEALTH_CHECK_PORT, ""
-                                ),
-                                env_vars=container_spec.get(ContainerSpec.ENV_VARS, []),
-                                restricted_params=container_spec.get(
-                                    ContainerSpec.RESTRICTED_PARAMS, []
-                                ),
-                            )
-                            if container_spec
-                            else None
-                        ),
-                    )
-                    if container.get("type").lower() == "inference":
-                        inference_items[container_type] = container_item
-                    elif (
-                        container.get("type").lower() == "fine-tune"
-                        or container_type == "odsc-llm-fine-tuning"
-                    ):
-                        finetune_items[container_type] = container_item
-                    elif (
-                        container.get("type").lower() in ("evaluation", "evaluate")
-                        or container_type == "odsc-llm-evaluate"
-                    ):
-                        evaluate_items[container_type] = container_item
-
-        return cls(
-            inference=inference_items, finetune=finetune_items, evaluate=evaluate_items
-        )
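With `from_container_index_json` removed (along with the `ContainerSpec` import it relied on), the container-index JSON path is gone and `AquaContainerConfig` is now built only from service-managed container metadata via `from_service_config`. A minimal caller-side sketch; the `service_containers` parameter name and the module path in the import are assumptions, since neither appears in this diff:

    from typing import List

    from oci.data_science.models import ContainerSummary

    # Module path assumed for illustration; the class is defined in the file
    # shown in this diff.
    from ads.aqua.config.container_config import AquaContainerConfig


    def build_container_config(
        service_containers: List[ContainerSummary],
    ) -> AquaContainerConfig:
        # from_service_config reads each container's
        # workload_configuration_details_list[0] to populate cli_param, the
        # ports, and the env_vars list seen in the earlier hunk.
        return AquaContainerConfig.from_service_config(
            service_containers=service_containers
        )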