
Commit c0cea60

Bump pydantic to 2.8.2 (#561)
* Bump pydantic to 2.8.2
* Run pydantic-bump
* More updates
* Fix openapi generation
* Fix tests
* black formatting
* Fix integration tests
1 parent 3ff1196 commit c0cea60

Some content is hidden: large commits have some content hidden by default, so not every changed file is shown below.

44 files changed (+373 / -343 lines changed)

.circleci/config.yml

Lines changed: 20 additions & 20 deletions

@@ -74,7 +74,7 @@ jobs:
     resource_class: small
     parallelism: 1
     steps:
-      - add_ssh_keys: # gives write access to CircleCI worker
+      - add_ssh_keys: # gives write access to CircleCI worker
          fingerprints:
            - "76:0c:1b:9e:e3:6a:c3:5c:6f:24:91:ef:7c:54:d2:7a"
      - checkout # checkout source code to working directory

@@ -157,10 +157,10 @@ jobs:
       DOCKER_BUILDKIT=1 docker build -f model-engine/model_engine_server/inference/pytorch_or_tf.user.Dockerfile \
         --build-arg BASE_IMAGE=temp:1.7.1-cuda11.0-cudnn8-runtime-$CIRCLE_SHA1 \
         --build-arg REQUIREMENTS_FILE="$CIRCLE_SHA1-requirements.txt" \
-        -t $CIRCLECI_AWS_ACCOUNT_ID.dkr.ecr.us-west-2.amazonaws.com/hosted-model-inference/async-pytorch:1.7.1-cuda11.0-cudnn8-runtime-$CIRCLE_SHA1-021694 .
+        -t $CIRCLECI_AWS_ACCOUNT_ID.dkr.ecr.us-west-2.amazonaws.com/hosted-model-inference/async-pytorch:1.7.1-cuda11.0-cudnn8-runtime-$CIRCLE_SHA1-b8c25b .
       rm $CIRCLE_SHA1-requirements.txt

-      minikube --logtostderr -v 1 image load $CIRCLECI_AWS_ACCOUNT_ID.dkr.ecr.us-west-2.amazonaws.com/hosted-model-inference/async-pytorch:1.7.1-cuda11.0-cudnn8-runtime-$CIRCLE_SHA1-021694
+      minikube --logtostderr -v 1 image load $CIRCLECI_AWS_ACCOUNT_ID.dkr.ecr.us-west-2.amazonaws.com/hosted-model-inference/async-pytorch:1.7.1-cuda11.0-cudnn8-runtime-$CIRCLE_SHA1-b8c25b
   - run:
       name: Install helm chart
       command: |

@@ -207,23 +207,23 @@ commands:
   install_server:
     description: Installs LLM Engine server
     steps:
-      - python/install-packages:
-          pkg-manager: pip
-          app-dir: model-engine
-      - python/install-packages:
-          pkg-manager: pip
-          app-dir: model-engine
-          pip-dependency-file: requirements-test.txt
-      - python/install-packages:
-          pkg-manager: pip
-          app-dir: model-engine
-          pip-dependency-file: requirements_override.txt
-      - run:
-          name: Install Server
-          command: |
-            pushd model-engine
-            pip install -e .
-            popd
+      - python/install-packages:
+          pkg-manager: pip
+          app-dir: model-engine
+      - python/install-packages:
+          pkg-manager: pip
+          app-dir: model-engine
+          pip-dependency-file: requirements-test.txt
+      - python/install-packages:
+          pkg-manager: pip
+          app-dir: model-engine
+          pip-dependency-file: requirements_override.txt
+      - run:
+          name: Install Server
+          command: |
+            pushd model-engine
+            pip install -e .
+            popd
   install_client:
     description: Install LLM Engine client
     steps:

clients/python/llmengine/data_types.py

Lines changed: 3 additions & 2 deletions

@@ -6,9 +6,10 @@
 from enum import Enum
 from typing import Any, Dict, List, Literal, Optional, Union

-import pydantic
+from pydantic.version import VERSION as PYDANTIC_VERSION

-if int(pydantic.__version__.split(".")[0]) > 1:
+PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
+if PYDANTIC_V2:
     from pydantic.v1 import BaseModel, Field, HttpUrl
 else:
     from pydantic import BaseModel, Field, HttpUrl  # type: ignore
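
The client keeps its v1-style models and only switches the import path: pydantic 2.x bundles the legacy API under the pydantic.v1 namespace, so the same model code runs against either installed major version. A standalone sketch of that shim (the model name below is hypothetical, not from the client):

# Sketch of the version shim; assumes pydantic >= 1.10 is installed.
from pydantic.version import VERSION as PYDANTIC_VERSION

PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
if PYDANTIC_V2:
    # pydantic 2.x ships the old API under pydantic.v1.
    from pydantic.v1 import BaseModel, Field
else:
    from pydantic import BaseModel, Field  # type: ignore


class CompletionRequest(BaseModel):  # hypothetical example model
    prompt: str
    max_new_tokens: int = Field(default=16, ge=1)


print(CompletionRequest(prompt="hello"))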

integration_tests/rest_api_utils.py

Lines changed: 6 additions & 1 deletion

@@ -59,7 +59,12 @@ def my_model(**keyword_args):
             "framework_type": "pytorch",
             "pytorch_image_tag": "1.7.1-cuda11.0-cudnn8-runtime",
         },
-        "requirements": ["cloudpickle==2.1.0", "pyyaml==6.0"],
+        "requirements": [
+            "cloudpickle==2.1.0",
+            "pyyaml==6.0",
+            "pydantic==2.8.2",
+            "fastapi==0.110.0",
+        ],
         "location": "s3://model-engine-integration-tests/model_bundles/echo_bundle",
     },
 }

integration_tests/test_endpoints.py

Lines changed: 1 addition & 1 deletion

@@ -232,7 +232,7 @@ def test_sync_streaming_model_endpoint(capsys):
         for response in task_responses:
             assert (
                 response.strip()
-                == 'data: {"status": "SUCCESS", "result": {"result": {"y": 1}}, "traceback": null}'
+                == 'data: {"status":"SUCCESS","result":{"result":{"y":1}},"traceback":null}'
             )
     finally:
         delete_model_endpoint(create_endpoint_request["name"], user)
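
The expected streaming payload loses its spaces most likely because pydantic 2's model_dump_json() emits compact JSON (no space after ':' or ','), unlike v1's .json(), which used json.dumps defaults. A small sketch of the difference, using a hypothetical stand-in for the server's response model:

from typing import Optional

from pydantic import BaseModel


class TaskResult(BaseModel):  # hypothetical stand-in, for illustration only
    status: str
    result: dict
    traceback: Optional[str] = None


payload = TaskResult(status="SUCCESS", result={"result": {"y": 1}})
# pydantic v2 serializes compactly, matching the updated assertion:
print(payload.model_dump_json())
# -> {"status":"SUCCESS","result":{"result":{"y":1}},"traceback":null}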

model-engine/model_engine_server/common/dtos/batch_jobs.py

Lines changed: 23 additions & 26 deletions

@@ -13,20 +13,21 @@
     GpuType,
     StorageSpecificationType,
 )
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, ConfigDict, model_validator


 class CreateBatchJobResourceRequests(BaseModel):
-    cpus: Optional[CpuSpecificationType]
-    memory: Optional[StorageSpecificationType]
-    gpus: Optional[int]
-    gpu_type: Optional[GpuType]
-    storage: Optional[StorageSpecificationType]
-    max_workers: Optional[int]
-    per_worker: Optional[int]
+    cpus: Optional[CpuSpecificationType] = None
+    memory: Optional[StorageSpecificationType] = None
+    gpus: Optional[int] = None
+    gpu_type: Optional[GpuType] = None
+    storage: Optional[StorageSpecificationType] = None
+    max_workers: Optional[int] = None
+    per_worker: Optional[int] = None


 class CreateBatchJobV1Request(BaseModel):
+    model_config = ConfigDict(protected_namespaces=())
     model_bundle_id: str
     input_path: str
     serialization_format: BatchJobSerializationFormat

@@ -41,10 +42,10 @@ class CreateBatchJobV1Response(BaseModel):

 class GetBatchJobV1Response(BaseModel):
     status: BatchJobStatus
-    result: Optional[str]
+    result: Optional[str] = None
     duration: timedelta
-    num_tasks_pending: Optional[int]
-    num_tasks_completed: Optional[int]
+    num_tasks_pending: Optional[int] = None
+    num_tasks_completed: Optional[int] = None


 class UpdateBatchJobV1Request(BaseModel):

@@ -64,9 +65,7 @@ class CreateDockerImageBatchJobResourceRequests(BaseModel):
     gpus: Optional[int] = None
     gpu_type: Optional[GpuType] = None
     storage: Optional[StorageSpecificationType] = None
-
-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)

     @classmethod
     def merge_requests(

@@ -93,7 +92,7 @@ def common_requests(
 class CreateDockerImageBatchJobV1Request(BaseModel):
     docker_image_batch_job_bundle_name: Optional[str] = None
     docker_image_batch_job_bundle_id: Optional[str] = None
-    job_config: Optional[Dict[str, Any]]
+    job_config: Optional[Dict[str, Any]] = None
     # TODO also expose a separate argument to pass an s3file to the job, as opposed to job_config
     labels: Dict[str, str]  # TODO this probably should go in the bundle

@@ -103,7 +102,7 @@ class CreateDockerImageBatchJobV1Request(BaseModel):

     override_job_max_runtime_s: Optional[int] = None

-    @root_validator
+    @model_validator(mode="before")
     def exactly_one_name_or_id(cls, values):
         bundle_name = values.get("docker_image_batch_job_bundle_name")
         bundle_id = values.get("docker_image_batch_job_bundle_id")

@@ -166,16 +165,14 @@ class DockerImageBatchJobBundleV1Response(BaseModel):
     image_tag: str
     command: List[str]
     env: Dict[str, str]
-    mount_location: Optional[str]
-    cpus: Optional[str]
-    memory: Optional[str]
-    storage: Optional[str]
-    gpus: Optional[int]
-    gpu_type: Optional[str]
-    public: Optional[bool]
-
-    class Config:
-        orm_mode = True
+    mount_location: Optional[str] = None
+    cpus: Optional[str] = None
+    memory: Optional[str] = None
+    storage: Optional[str] = None
+    gpus: Optional[int] = None
+    gpu_type: Optional[str] = None
+    public: Optional[bool] = None
+    model_config = ConfigDict(from_attributes=True)


 class ListDockerImageBatchJobBundleV1Response(BaseModel):
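
The DTO changes above follow the standard pydantic v1-to-v2 migration: a class-based Config becomes model_config = ConfigDict(...) with orm_mode renamed to from_attributes, @root_validator becomes @model_validator(mode="before"), Optional fields need an explicit = None default, and protected_namespaces=() silences the v2 warning for fields whose names start with "model_". A minimal self-contained sketch of the same pattern (illustrative names, not the repository's actual models):

from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class ExampleJobRequest(BaseModel):
    # protected_namespaces=() allows field names starting with "model_", which
    # pydantic v2 otherwise reserves; from_attributes replaces v1's orm_mode.
    model_config = ConfigDict(protected_namespaces=(), from_attributes=True)

    model_bundle_name: Optional[str] = None  # v2 needs explicit defaults for Optional fields
    model_bundle_id: Optional[str] = None
    job_config: Optional[Dict[str, Any]] = None

    @model_validator(mode="before")  # replaces v1's @root_validator
    @classmethod
    def exactly_one_name_or_id(cls, values):
        if bool(values.get("model_bundle_name")) == bool(values.get("model_bundle_id")):
            raise ValueError("exactly one of model_bundle_name or model_bundle_id must be set")
        return values


# Usage: exactly one identifier must be provided.
print(ExampleJobRequest(model_bundle_id="bnd-123"))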
Lines changed: 17 additions & 0 deletions

@@ -0,0 +1,17 @@
+from pydantic import BaseModel, BeforeValidator, ConfigDict, HttpUrl, TypeAdapter
+from typing_extensions import Annotated
+
+# See: https://github.com/pydantic/pydantic/issues/7186
+# pydantic v2 doesn't treat HttpUrl the same way as in v1 which causes various issue
+# This is an attempt to make it behave as similar as possible
+HttpUrlTypeAdapter = TypeAdapter(HttpUrl)
+HttpUrlStr = Annotated[
+    str,
+    BeforeValidator(lambda value: HttpUrlTypeAdapter.validate_python(value) and value),
+]
+
+
+class LLMEngineModel(BaseModel):
+    """Common pydantic configurations for model engine"""
+
+    model_config = ConfigDict(protected_namespaces=())
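
A quick illustration of what the HttpUrlStr shim buys (a sketch, not from the repository): in pydantic v2, HttpUrl is no longer a str subclass, so validated values come back as Url objects and break code that expects plain strings. HttpUrlStr validates the input as a URL first but keeps the original str:

from pydantic import BaseModel, BeforeValidator, HttpUrl, TypeAdapter, ValidationError
from typing_extensions import Annotated

HttpUrlTypeAdapter = TypeAdapter(HttpUrl)
HttpUrlStr = Annotated[
    str,
    BeforeValidator(lambda value: HttpUrlTypeAdapter.validate_python(value) and value),
]


class Callback(BaseModel):  # hypothetical model, for illustration only
    url: HttpUrlStr


cb = Callback(url="https://example.com/hook")
assert isinstance(cb.url, str)  # stays a plain str, as it did with v1's HttpUrl

try:
    Callback(url="not a url")  # still rejected: the URL check runs before the str check
except ValidationError:
    print("invalid URL rejected")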

model-engine/model_engine_server/common/dtos/docker_repository.py

Lines changed: 2 additions & 2 deletions

@@ -10,8 +10,8 @@ class BuildImageRequest(BaseModel):
     base_path: str
     dockerfile: str
     base_image: str
-    requirements_folder: Optional[str]
-    substitution_args: Optional[Dict[str, str]]
+    requirements_folder: Optional[str] = None
+    substitution_args: Optional[Dict[str, str]] = None


 class BuildImageResponse(BaseModel):

model-engine/model_engine_server/common/dtos/endpoint_builder.py

Lines changed: 8 additions & 8 deletions

@@ -20,19 +20,19 @@ class BuildEndpointRequest(BaseModel):
     cpus: CpuSpecificationType
     gpus: int
     memory: StorageSpecificationType
-    gpu_type: Optional[GpuType]
-    storage: Optional[StorageSpecificationType]
+    gpu_type: Optional[GpuType] = None
+    storage: Optional[StorageSpecificationType] = None
     optimize_costs: bool
     aws_role: str
     results_s3_bucket: str
-    child_fn_info: Optional[Dict[str, Any]]  # TODO: remove this if we don't need it.
-    post_inference_hooks: Optional[List[str]]
+    child_fn_info: Optional[Dict[str, Any]] = None  # TODO: remove this if we don't need it.
+    post_inference_hooks: Optional[List[str]] = None
     labels: Dict[str, str]
-    billing_tags: Optional[Dict[str, Any]]
+    billing_tags: Optional[Dict[str, Any]] = None
     prewarm: bool = True
-    high_priority: Optional[bool]
-    default_callback_url: Optional[str]
-    default_callback_auth: Optional[CallbackAuth]
+    high_priority: Optional[bool] = None
+    default_callback_url: Optional[str] = None
+    default_callback_auth: Optional[CallbackAuth] = None


 class BuildEndpointStatus(str, Enum):
