diff --git a/caso/record.py b/caso/record.py
index 2ca04ff..eadd7ed 100644
--- a/caso/record.py
+++ b/caso/record.py
@@ -50,6 +50,8 @@ def ssm_message(self):
 
 
 class _ValidCloudStatus(str, enum.Enum):
+    """Private enum with the valid cloud statuses."""
+
     started = "started"
     completed = "completed"
     error = "error"
@@ -59,6 +61,40 @@ class _ValidCloudStatus(str, enum.Enum):
     unknown = "unknown"
 
 
+def map_cloud_fields(value: str) -> str:
+    """Map object fields to Cloud Accounting Record fields."""
+    d = {
+        "uuid": "VMUUID",
+        "site_name": "SiteName",
+        "name": "MachineName",
+        "user_id": "LocalUserId",
+        "group_id": "LocalGroupId",
+        "fqan": "FQAN",
+        "status": "Status",
+        "start_time_epoch": "StartTime",
+        "end_time_epoch": "EndTime",
+        "suspend_duration": "SuspendDuration",
+        "wall_duration": "WallDuration",
+        "cpu_duration": "CpuDuration",
+        "cpu_count": "CpuCount",
+        "network_type": "NetworkType",
+        "network_in": "NetworkInbound",
+        "network_out": "NetworkOutbound",
+        "memory": "Memory",
+        "disk": "Disk",
+        "storage_record_id": "StorageRecordId",
+        "image_id": "ImageId",
+        "cloud_type": "CloudType",
+        "user_dn": "GlobalUserName",
+        "public_ip_count": "PublicIPCount",
+        "benchmark_value": "Benchmark",
+        "benchmark_type": "BenchmarkType",
+        "compute_service": "CloudComputeService",
+        "cloud_type": "CloudType",
+    }
+    return d.get(value, value)
+
+
 class CloudRecord(_BaseRecord):
     """The CloudRecord class holds information for each of the records.
 
@@ -71,60 +107,116 @@ class CloudRecord(_BaseRecord):
 
     name: str
     user_id: str
-    user_dn: typing.Optional[str]
+    user_dn: typing.Optional[str] = None
     group_id: str
     fqan: str
 
     status: _ValidCloudStatus
-    image_id: typing.Optional[str]
+    image_id: typing.Optional[str] = None
 
-    public_ip_count = 0
+    public_ip_count: int = 0
 
     cpu_count: int
     memory: int
     disk: int
 
-    start_time: datetime.datetime
-    end_time: typing.Optional[datetime.datetime]
+    # Make these fields private, and deal with them as properties. This is done as all
+    # the accounting infrastructure needs start and end times as integers, but it is
+    # easier for us to maintain them as datetime objects internally.
+    _start_time: datetime.datetime
+    _end_time: typing.Optional[datetime.datetime] = None
+
+    suspend_duration: typing.Optional[int] = None
 
-    # NOTE(aloga): due to the validation that we are doing below until
-    # https://github.com/samuelcolvin/pydantic/issues/935
-    # and
-    # https://github.com/samuelcolvin/pydantic/pull/2625
-    # are closed, we need to define the durations here, so do not move them
-    # around, otherwise we cannot access the needed fields
-    suspend_duration: typing.Optional[int]
+    _wall_duration: typing.Optional[int] = None
+    _cpu_duration: typing.Optional[int] = None
 
-    wall_duration: typing.Optional[int]
-    cpu_duration: typing.Optional[int]
+    benchmark_value: typing.Optional[float] = None
+    benchmark_type: typing.Optional[str] = None
+
+    def __init__(
+        self,
+        start_time: datetime.datetime,
+        end_time: datetime.datetime,
+        *args,
+        **kwargs,
+    ):
+        """Initialize the record."""
+        super(CloudRecord, self).__init__(*args, **kwargs)
+
+        # Set start and end times, see comment above.
+        self._start_time = start_time
+        self._end_time = end_time
+
+    @property
+    def start_time(self) -> datetime.datetime:
+        """Get start time."""
+        return self._start_time
+
+    @start_time.setter
+    def start_time(self, start_time: datetime.datetime) -> None:
+        """Set start time."""
+        self._start_time = start_time
+
+    # NOTE(aloga): we need to specify an alias here, as per the following bug:
+    # https://github.com/pydantic/pydantic/issues/5825
+    # This is needed for all computed fields.
+    @pydantic.computed_field(alias="StartTime")  # type: ignore[misc]
+    @property
+    def start_time_epoch(self) -> int:
+        """Get start time as epoch."""
+        return int(self._start_time.timestamp())
 
-    benchmark_value: typing.Optional[float]
-    benchmark_type: typing.Optional[str]
+    @property
+    def end_time(self) -> typing.Optional[datetime.datetime]:
+        """Get end time."""
+        if self._end_time is not None:
+            return self._end_time
+        else:
+            return None
+
+    @end_time.setter
+    def end_time(self, end_time: datetime.datetime) -> None:
+        """Set end time."""
+        self._end_time = end_time
+
+    @pydantic.computed_field()  # type: ignore[misc]
+    @property
+    def end_time_epoch(self) -> typing.Optional[int]:
+        """Get end time as epoch."""
+        return int(self.end_time.timestamp()) if self.end_time else None
 
-    @classmethod
-    @pydantic.validator("wall_duration", always=True)
-    def _validate_wall_duration(cls, value, values):
+    @pydantic.computed_field()  # type: ignore[misc]
+    @property
+    def wall_duration(self) -> typing.Optional[int]:
+        """Get wall duration."""
         duration = None
-        if value is not None:
-            duration = value
-        elif values["end_time"]:
-            duration = values["end_time"] - values["self.start_time"]
+        if self._wall_duration is not None:
+            duration = self._wall_duration
+        elif self.end_time:
+            duration = self.end_time - self.start_time
             duration = int(duration.total_seconds())
         return duration
 
-    @classmethod
-    @pydantic.validator("cpu_duration", always=True)
-    def _validate_cpu_duration(cls, value, values):
+    @wall_duration.setter
+    def wall_duration(self, wall: int) -> None:
+        """Set wall duration."""
+        self._wall_duration = wall
+
+    @pydantic.computed_field()  # type: ignore[misc]
+    @property
+    def cpu_duration(self) -> typing.Optional[int]:
+        """Get CPU duration."""
         duration = None
-        if value is not None:
-            duration = value
-        elif values["wall_duration"] is not None and values["cpu_count"]:
-            duration = values["wall_duration"] * values["cpu_count"]
-            duration = int(duration)
+        if self._cpu_duration is not None:
+            duration = self._cpu_duration
+        elif self.wall_duration is not None and self.cpu_count:
+            duration = self.wall_duration * self.cpu_count
         return duration
 
-    def set_cpu_duration(self, value: int):
-        """Set the CPU duration for the record."""
+    @cpu_duration.setter
+    def cpu_duration(self, value: int) -> None:
+        """Set the CPU duration."""
         self._cpu_duration = value
 
     def ssm_message(self):
@@ -137,52 +229,33 @@ def ssm_message(self):
         # is just a dictionary representation of the object, where no serialization is
         # done. In order to get objects correctly serialized we need to convert to JSON,
         # then reload the model
-        serialized_record = json.loads(self.json(**opts))
+        serialized_record = json.loads(self.model_dump_json(**opts))
 
         aux = [f"{k}: {v}" for k, v in serialized_record.items()]
         aux.sort()
         return "\n".join(aux)
 
-    class Config:
-        """Config class for Pydantic."""
-
-        @staticmethod
-        def map_fields(value: str) -> str:
-            """Map object fields to Cloud Accounting Record fields."""
-            d = {
-                "uuid": "VMUUID",
-                "site_name": "SiteName",
-                "name": "MachineName",
-                "user_id": "LocalUserId",
-                "group_id": "LocalGroupId",
-                "fqan": "FQAN",
-                "status": "Status",
-                "start_time": "StartTime",
-                "end_time": "EndTime",
-                "suspend_duration": "SuspendDuration",
-                "wall_duration": "WallDuration",
-                "cpu_duration": "CpuDuration",
-                "cpu_count": "CpuCount",
-                "network_type": "NetworkType",
-                "network_in": "NetworkInbound",
-                "network_out": "NetworkOutbound",
-                "memory": "Memory",
-                "disk": "Disk",
-                "storage_record_id": "StorageRecordId",
-                "image_id": "ImageId",
-                "cloud_type": "CloudType",
-                "user_dn": "GlobalUserName",
-                "public_ip_count": "PublicIPCount",
-                "benchmark_value": "Benchmark",
-                "benchmark_type": "BenchmarkType",
-                "compute_service": "CloudComputeService",
-            }
-            return d.get(value, value)
-
-        json_encoders = {datetime.datetime: lambda v: int(v.timestamp())}
-        alias_generator = map_fields
-        allow_population_by_field_name = True
-        underscore_attrs_are_private = True
-        extra = "forbid"
+    model_config = dict(
+        alias_generator=map_cloud_fields,
+        populate_by_name=True,
+        extra="forbid",
+    )
+
+
+def map_ip_fields(field: str) -> str:
+    """Map object fields to accounting Public IP Usage record fields."""
+    d = {
+        "measure_time_epoch": "MeasurementTime",
+        "site_name": "SiteName",
+        "cloud_type": "CloudType",
+        "user_id": "LocalUser",
+        "group_id": "LocalGroup",
+        "fqan": "FQAN",
+        "user_dn": "GlobalUserName",
+        "ip_version": "IPVersion",
+        "public_ip_count": "IPCount",
+        "compute_service": "CloudComputeService",
+    }
+    return d.get(field, field)
 
 
 class IPRecord(_BaseRecord):
@@ -200,44 +273,73 @@
 
     group_id: str
     fqan: str
 
-    measure_time: datetime.datetime
+    # Make these fields private, and deal with them as properties. This is done as all
+    # the accounting infrastructure needs start and end times as integers, but it is
+    # easier for us to maintain them as datetime objects internally.
+    _measure_time: datetime.datetime
 
     ip_version: int
     public_ip_count: int
 
+    def __init__(self, measure_time: datetime.datetime, *args, **kwargs):
+        """Initialize the record."""
+        super(IPRecord, self).__init__(*args, **kwargs)
+
+        self._measure_time = measure_time
+
+    @property
+    def measure_time(self) -> datetime.datetime:
+        """Get measurement time."""
+        return self._measure_time
+
+    @pydantic.computed_field()
+    @property
+    def measure_time_epoch(self) -> int:
+        """Get measurement time as epoch."""
+        return int(self._measure_time.timestamp())
+
+    @measure_time.setter
+    def measure_time(self, measure_time: datetime.datetime) -> None:
+        """Set measurement time."""
+        self._measure_time = measure_time
+
     def ssm_message(self):
         """Render record as the expected SSM message."""
         opts = {
             "by_alias": True,
             "exclude_none": True,
         }
-        return self.json(**opts)
-
-    class Config:
-        """Config class for Pydantic."""
-
-        @staticmethod
-        def map_fields(field: str) -> str:
-            """Map object fields to accounting Public IP Usage record fields."""
-            d = {
-                "measure_time": "MeasurementTime",
-                "site_name": "SiteName",
-                "cloud_type": "CloudType",
-                "user_id": "LocalUser",
-                "group_id": "LocalGroup",
-                "fqan": "FQAN",
-                "user_dn": "GlobalUserName",
-                "ip_version": "IPVersion",
-                "public_ip_count": "IPCount",
-                "compute_service": "CloudComputeService",
-            }
-            return d.get(field, field)
-
-        json_encoders = {datetime.datetime: lambda v: int(v.timestamp())}
-        alias_generator = map_fields
-        allow_population_by_field_name = True
-        underscore_attrs_are_private = True
-        extra = "forbid"
+        return self.model_dump_json(**opts)
+
+    model_config = dict(
+        alias_generator=map_ip_fields,
+        populate_by_name=True,
+        extra="forbid",
+    )
+
+
+def map_accelerator_fields(field: str) -> str:
+    """Map object fields to accounting Accelerator Usage Record fields."""
+    d = {
+        "measurement_month": "MeasurementMonth",
+        "measurement_year": "MeasurementYear",
+        "associated_record_type": "AssociatedRecordType",
+        "uuid": "AccUUID",
+        "user_dn": "GlobalUserName",
+        "fqan": "FQAN",
+        "site_name": "SiteName",
+        "count": "Count",
+        "cores": "Cores",
+        "active_duration": "ActiveDuration",
+        "available_duration": "AvailableDuration",
+        "benchmark_type": "BenchmarkType",
+        "benchmark": "Benchmark",
+        "accelerator_type": "Type",
+        "model": "Model",
+        "compute_service": "CloudComputeService",
+        "cloud_type": "CloudType",
+    }
+    return d.get(field, field)
 
 
 class AcceleratorRecord(_BaseRecord):
@@ -256,7 +358,7 @@ class AcceleratorRecord(_BaseRecord):
     count: int
 
     available_duration: int
-    _active_duration: typing.Optional[int]
+    _active_duration: typing.Optional[int] = None
 
     measurement_month: int
     measurement_year: int
@@ -264,12 +366,13 @@ class AcceleratorRecord(_BaseRecord):
     associated_record_type: str = "cloud"
 
     accelerator_type: str
-    cores: typing.Optional[int]
+    cores: typing.Optional[int] = None
     model: str
 
-    benchmark_value: typing.Optional[float]
-    benchmark_type: typing.Optional[str]
+    benchmark_value: typing.Optional[float] = None
+    benchmark_type: typing.Optional[str] = None
 
+    @pydantic.computed_field  # type: ignore[misc]
     @property
     def active_duration(self) -> int:
         """Get the active duration for the record (property)."""
@@ -277,7 +380,8 @@ def active_duration(self) -> int:
             return self._active_duration
         return self.available_duration
 
-    def set_active_duration(self, value: int):
+    @active_duration.setter
+    def active_duration(self, value: int) -> None:
         """Set the active duration for the record."""
         self._active_duration = value
 
@@ -287,40 +391,37 @@ def ssm_message(self):
             "by_alias": True,
             "exclude_none": True,
         }
-        return self.json(**opts)
-
-    class Config:
-        """Config class for Pydantic."""
-
-        @staticmethod
-        def map_fields(field: str) -> str:
-            """Map object fields to accounting Accelerator Usage Record fields."""
-            d = {
-                "measurement_month": "MeasurementMonth",
-                "measurement_year": "MeasurementYear",
-                "associated_record_type": "AssociatedRecordType",
-                "uuid": "AccUUID",
-                "user_dn": "GlobalUserName",
-                "fqan": "FQAN",
-                "site_name": "SiteName",
-                "count": "Count",
-                "cores": "Cores",
-                "active_duration": "ActiveDuration",
-                "available_duration": "AvailableDuration",
-                "benchmark_type": "BenchmarkType",
-                "benchmark": "Benchmark",
-                "accelerator_type": "Type",
-                "model": "Model",
-                "cloud_type": "CloudType",
-                "compute_service": "CloudComputeService",
-            }
-            return d.get(field, field)
-
-        json_encoders = {datetime.datetime: lambda v: int(v.timestamp())}
-        alias_generator = map_fields
-        allow_population_by_field_name = True
-        underscore_attrs_are_private = True
-        extra = "forbid"
+        return self.model_dump_json(**opts)
+
+    model_config = dict(
+        alias_generator=map_accelerator_fields,
+        populate_by_name=True,
+        extra="forbid",
+    )
+
+
+def map_storage_fields(field: str) -> str:
+    """Map object fields to accounting EMI StAR record values."""
+    d = {
+        "uuid": "VolumeUUID",
+        "name": "RecordName",
+        "user_id": "LocalUser",
+        "user_dn": "GlobalUserName",
+        "group_id": "LocalGroup",
+        "fqan": "FQAN",
+        "site_name": "SiteName",
+        "capacity": "Capacity",
+        "active_duration": "ActiveDuration",
+        "measure_time_epoch": "CreateTime",
+        "start_time_epoch": "StartTime",
+        "storage_type": "Type",
+        "status": "Status",
+        "attached_to": "AttachedTo",
+        "attached_duration": "AttachedDuration",
+        "compute_service": "CloudComputeService",
+        "cloud_type": "CloudType",
+    }
+    return d.get(field, field)
 
 
 class StorageRecord(_BaseRecord):
@@ -336,28 +437,69 @@ class StorageRecord(_BaseRecord):
 
     name: str
     user_id: str
-    user_dn: typing.Optional[str]
+    user_dn: typing.Optional[str] = None
     group_id: str
     fqan: str
 
     active_duration: int
-    attached_duration: typing.Optional[float]
-    attached_to: typing.Optional[str]
-    measure_time: datetime.datetime
-    start_time: datetime.datetime
+    attached_duration: typing.Optional[float] = None
+    attached_to: typing.Optional[str] = None
+
+    # Make these fields private, and deal with them as properties. This is done as all
+    # the accounting infrastructure needs start and end times as integers, but it is
+    # easier for us to maintain them as datetime objects internally.
+    _measure_time: datetime.datetime
+    _start_time: datetime.datetime
 
     storage_type: typing.Optional[str] = "Block Storage (cinder)"
     status: str
 
     capacity: int
 
-    # (aidaph) Fix the return to something different to 0
-    @classmethod
-    @pydantic.validator("attached_duration", always=True)
-    def _validate_attached_duration(cls, value):
-        if value is not None:
-            return value
-        return 0
+    def __init__(
+        self,
+        start_time: datetime.datetime,
+        measure_time: datetime.datetime,
+        *args,
+        **kwargs,
+    ):
+        """Initialize the record."""
+        super(StorageRecord, self).__init__(*args, **kwargs)
+
+        self._start_time = start_time
+        self._measure_time = measure_time
+
+    @property
+    def start_time(self) -> datetime.datetime:
+        """Get start time."""
+        return self._start_time
+
+    @pydantic.computed_field()  # type: ignore[misc]
+    @property
+    def start_time_epoch(self) -> int:
+        """Get start time as epoch."""
+        return int(self._start_time.timestamp())
+
+    @start_time.setter
+    def start_time(self, start_time: datetime.datetime) -> None:
+        """Set start time."""
+        self._start_time = start_time
+
+    @property
+    def measure_time(self) -> datetime.datetime:
+        """Get measurement time."""
+        return self._measure_time
+
+    @pydantic.computed_field()  # type: ignore[misc]
+    @property
+    def measure_time_epoch(self) -> int:
+        """Get measurement time as epoch."""
+        return int(self._measure_time.timestamp())
+
+    @measure_time.setter
+    def measure_time(self, measure_time: datetime.datetime) -> None:
+        """Set measurement time."""
+        self._measure_time = measure_time
 
     def ssm_message(self):
         """Render record as the expected SSM message."""
@@ -386,34 +528,8 @@ def ssm_message(self):
         ETree.SubElement(sr, "sr:ResourceCapacityUsed").text = capacity
         return ETree.tostring(sr)
 
-    class Config:
-        """Config class for Pydantic."""
-
-        @staticmethod
-        def map_fields(field: str) -> str:
-            """Map object fields to accounting EMI StAR record values."""
-            d = {
-                "measure_time": "CreateTime",
-                "uuid": "VolumeUUID",
-                "name": "RecordName",
-                "user_id": "LocalUser",
-                "user_dn": "GlobalUserName",
-                "group_id": "LocalGroup",
-                "fqan": "FQAN",
-                "site_name": "SiteName",
-                "capacity": "Capacity",
-                "active_duration": "ActiveDuration",
-                "start_time": "StartTime",
-                "storage_type": "Type",
-                "status": "Status",
-                "attached_to": "AttachedTo",
-                "attached_duration": "AttachedDuration",
-                "cloud_type": "CloudType",
-                "compute_service": "CloudComputeService",
-            }
-            return d.get(field, field)
-
-        alias_generator = map_fields
-        allow_population_by_field_name = True
-        underscore_attrs_are_private = True
-        extra = "forbid"
+    model_config = dict(
+        alias_generator=map_storage_fields,
+        populate_by_name=True,
+        extra="forbid",
+    )
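
The record.py changes above repeat a single pattern: timestamps are kept internally as private datetime attributes, exposed through plain properties, and published to the accounting side as integer epochs via pydantic 2 computed fields, with aliases coming from the module-level map_*_fields helpers. A minimal, self-contained sketch of that pattern under pydantic>=2 (illustrative only: MiniRecord, map_fields and the sample values are invented here and are not part of caso):

    import datetime
    import json

    import pydantic


    def map_fields(name: str) -> str:
        # Hypothetical alias map, in the style of map_cloud_fields / map_storage_fields.
        aliases = {"site_name": "SiteName", "start_time_epoch": "StartTime"}
        return aliases.get(name, name)


    class MiniRecord(pydantic.BaseModel):
        # Same config shape as the records above: aliases for serialization,
        # plain field names still accepted on input.
        model_config = dict(alias_generator=map_fields, populate_by_name=True)

        site_name: str
        _start_time: datetime.datetime  # private attribute, kept as a datetime internally

        def __init__(self, start_time: datetime.datetime, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self._start_time = start_time

        # Computed fields may need an explicit alias (pydantic issue #5825), which is
        # why the patch passes alias="StartTime" instead of relying on the generator.
        @pydantic.computed_field(alias="StartTime")  # type: ignore[misc]
        @property
        def start_time_epoch(self) -> int:
            return int(self._start_time.timestamp())


    record = MiniRecord(
        start_time=datetime.datetime(2023, 5, 20, tzinfo=datetime.timezone.utc),
        site_name="TEST-Site",
    )
    print(json.loads(record.model_dump_json(by_alias=True)))
    # Expected, assuming the behaviour described above:
    # {'SiteName': 'TEST-Site', 'StartTime': 1684540800}

The same shape repeats in IPRecord (measure_time_epoch rendered as MeasurementTime) and in StorageRecord (start_time_epoch and measure_time_epoch rendered as StartTime and CreateTime).
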
diff --git a/caso/tests/conftest.py b/caso/tests/conftest.py
index 9ca82cd..2e25861 100644
--- a/caso/tests/conftest.py
+++ b/caso/tests/conftest.py
@@ -208,6 +208,7 @@
         "AccUUID": "99cf5d02-a573-46a1-b90d-0f7327126876",
         "AssociatedRecordType": "cloud",
         "AvailableDuration": 5000,
+        "ActiveDuration": 5000,
         "Count": 3,
         "FQAN": "VO 1 FQAN",
         "GlobalUserName": "d4e547e6f298fe34389@foobar.eu",
@@ -223,6 +224,7 @@
         "AccUUID": "99cf5d02-a573-46a1-b90d-0f7327126876",
         "AssociatedRecordType": "cloud",
         "AvailableDuration": 5000,
+        "ActiveDuration": 5000,
         "Count": 30,
         "FQAN": "VO 1 FQAN",
         "GlobalUserName": "d4e547e6f298fe34389@foobar.eu",
@@ -281,8 +283,8 @@
         "LocalGroup": "313c6f62-e05f-4ec7-b0f2-256612db18f5",
         "FQAN": "VO 1 FQAN",
         "ActiveDuration": 400,
-        "CreateTime": "2023-05-25T21:59:06+00:00",
-        "StartTime": "2023-05-20T21:59:06+00:00",
+        "CreateTime": 1685051946,
+        "StartTime": 1684619946,
         "Type": "Block Storage (cinder)",
         "Status": "in-use",
         "Capacity": 322122547200,
@@ -298,8 +300,8 @@
         "LocalGroup": "313c6f62-e05f-4ec7-b0f2-256612db18f5",
         "FQAN": "VO 2 FQAN",
         "ActiveDuration": 400,
-        "CreateTime": "2023-05-25T21:59:06+00:00",
-        "StartTime": "2023-05-20T21:59:06+00:00",
+        "CreateTime": 1685051946,
+        "StartTime": 1684533546,
         "Type": "Block Storage (cinder)",
         "Status": "in-use",
         "Capacity": 122122547200,
@@ -309,44 +311,39 @@
 
 # Cloud Record fixtures
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def cloud_record() -> caso.record.CloudRecord:
     """Get a fixture for the CloudRecord."""
     record = caso.record.CloudRecord(**valid_cloud_records_fields[0])
-    # Remove this when moving to Pydantic 2
-    record.wall_duration = 432000
-    record.cpu_duration = 3456000
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_cloud_record() -> caso.record.CloudRecord:
     """Get another fixture for the CloudRecord."""
     record = caso.record.CloudRecord(**valid_cloud_records_fields[1])
-    record.wall_duration = 432000
-    record.cpu_duration = 3456000
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_cloud_record() -> dict:
     """Get a fixture for a valid record."""
     return valid_cloud_records_dict[0]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_cloud_records() -> typing.List[dict]:
     """Get a fixture for valid records as a dict."""
     return valid_cloud_records_dict
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_valid_cloud_record() -> dict:
     """Get another fixture for a valid record as a dict."""
     return valid_cloud_records_dict[0]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def cloud_record_list(
     cloud_record, another_cloud_record
 ) -> typing.List[caso.record.CloudRecord]:
@@ -357,39 +354,39 @@ def cloud_record_list(
 # IP record fixtures
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def ip_record() -> caso.record.IPRecord:
     """Get a fixture for an IP record."""
     record = caso.record.IPRecord(**valid_ip_records_fields[0])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_ip_record() -> caso.record.IPRecord:
     """Get another fixture for an IP record."""
     record = caso.record.IPRecord(**valid_ip_records_fields[1])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_ip_record() -> dict:
     """Get a fixture for a valid IP record as a dict."""
     return valid_ip_records_dict[0]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_ip_records() -> typing.List[dict]:
     """Get a fixture for all IP records as a dict."""
     return valid_ip_records_dict
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_valid_ip_record() -> dict:
     """Get another fixture for an IP record as a dict."""
     return valid_ip_records_dict[1]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def ip_record_list(ip_record, another_ip_record) -> typing.List[caso.record.IPRecord]:
     """Get a fixture for a list of IP records."""
     return [ip_record, another_ip_record]
@@ -398,33 +395,33 @@ def ip_record_list(ip_record, another_ip_record) -> typing.List[caso.record.IPRe
 # Accelerator records
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def accelerator_record() -> caso.record.AcceleratorRecord:
     """Get a fixture for the AcceleratorRecord."""
     record = caso.record.AcceleratorRecord(**valid_accelerator_records_fields[0])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_accelerator_record() -> caso.record.AcceleratorRecord:
     """Get another fixture for the AcceleratorRecord."""
     record = caso.record.AcceleratorRecord(**valid_accelerator_records_fields[1])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_accelerator_record() -> dict:
     """Get a fixture for a valid record."""
     return valid_accelerator_records_dict[0]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_accelerator_records() -> typing.List[dict]:
     """Get a fixture for valid records as a dict."""
     return valid_accelerator_records_dict
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def accelerator_record_list(
     accelerator_record, another_accelerator_record
 ) -> typing.List[caso.record.AcceleratorRecord]:
@@ -435,33 +432,33 @@ def accelerator_record_list(
 # Storage records
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def storage_record() -> caso.record.StorageRecord:
     """Get a fixture for the StorageRecord."""
     record = caso.record.StorageRecord(**valid_storage_records_fields[0])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def another_storage_record() -> caso.record.StorageRecord:
     """Get another fixture for the StorageRecord."""
     record = caso.record.StorageRecord(**valid_storage_records_fields[1])
     return record
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_storage_record() -> dict:
     """Get a fixture for a valid record."""
     return valid_storage_records_dict[0]
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def valid_storage_records() -> typing.List[dict]:
     """Get a fixture for valid records as a dict."""
     return valid_storage_records_dict
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture()
 def storage_record_list(
     storage_record, another_storage_record
 ) -> typing.List[caso.record.StorageRecord]:
@@ -498,7 +495,7 @@ def expected_entries_cloud() -> typing.List[str]:
         "CloudComputeService: Fake Cloud Service\n"
         f"CloudType: {cloud_type}\n"
         "CpuCount: 8\n"
-        "CpuDuration: 3456000\n"
+        "CpuDuration: 4147200\n"
         "Disk: 250\n"
         "EndTime: 1685051946\n"
         "FQAN: VO 2 FQAN\n"
@@ -513,7 +510,7 @@ def expected_entries_cloud() -> typing.List[str]:
         "StartTime: 1684533546\n"
         "Status: completed\n"
         "VMUUID: a53738e1-13eb-4047-800c-067d14ce3d22\n"
-        "WallDuration: 432000",
+        "WallDuration: 518400",
     ]
     return ssm_entries
 
@@ -535,14 +532,14 @@ def expected_message_cloud() -> str:
         "WallDuration: 432000\n"
         "%%"
         "\nCloudComputeService: Fake Cloud Service\n"
-        f"CloudType: {cloud_type}\nCpuCount: 8\nCpuDuration: 3456000\n"
+        f"CloudType: {cloud_type}\nCpuCount: 8\nCpuDuration: 4147200\n"
         "Disk: 250\nEndTime: 1685051946\nFQAN: VO 2 FQAN\nGlobalUserName: User DN\n"
         "ImageId: b39a8ed9-e15d-4b71-ada2-daf88efbac0a\n"
         "LocalGroupId: 03b6a6c4-cf2b-48b9-82f1-69c52b9f30af\n"
         "LocalUserId: a4519d7d-f60a-4908-9d63-7d9e17422188\nMachineName: VM Name 2\n"
         "Memory: 16\nPublicIPCount: 7\nSiteName: TEST-Site\nStartTime: 1684533546\n"
         "Status: completed\nVMUUID: a53738e1-13eb-4047-800c-067d14ce3d22\n"
-        "WallDuration: 432000\n"
+        "WallDuration: 518400\n"
     )
     return message.encode("utf-8")
 
@@ -551,28 +548,28 @@ def expected_message_cloud() -> str:
 def expected_entries_ip() -> typing.List[str]:
     """Get a fixture for all IP entries."""
     ssm_entries = [
-        '{"SiteName": "TEST-Site", '
-        f'"CloudType": "{cloud_type}", '
-        '"CloudComputeService": "Fake Cloud Service", '
-        '"uuid": "e3c5aeef-37b8-4332-ad9f-9d068f156dc2", '
-        '"LocalUser": "a4519d7d-f60a-4908-9d63-7d9e17422188", '
-        '"GlobalUserName": "User 1 DN", '
-        '"LocalGroup": "03b6a6c4-cf2b-48b9-82f1-69c52b9f30af", '
-        '"FQAN": "VO 1 FQAN", '
-        '"MeasurementTime": 1685051946, '
-        '"IPVersion": 4, '
-        '"IPCount": 10}',
-        '{"SiteName": "TEST-Site", '
-        f'"CloudType": "{cloud_type}", '
-        '"CloudComputeService": "Fake Cloud Service", '
-        '"uuid": "5c50720e-a653-4d70-9b0e-d4388687fcbc", '
-        '"LocalUser": "3391a44e-3728-478d-abde-b86c25356571", '
-        '"GlobalUserName": "User 2 DN", '
-        '"LocalGroup": "2dae43c4-1889-4e63-b172-d4e99381e30a", '
-        '"FQAN": "VO 2 FQAN", '
-        '"MeasurementTime": 1685051946, '
-        '"IPVersion": 6, '
-        '"IPCount": 20}',
+        '{"SiteName":"TEST-Site",'
+        f'"CloudType":"{cloud_type}",'
+        '"CloudComputeService":"Fake Cloud Service",'
+        '"uuid":"e3c5aeef-37b8-4332-ad9f-9d068f156dc2",'
+        '"LocalUser":"a4519d7d-f60a-4908-9d63-7d9e17422188",'
+        '"GlobalUserName":"User 1 DN",'
+        '"LocalGroup":"03b6a6c4-cf2b-48b9-82f1-69c52b9f30af",'
+        '"FQAN":"VO 1 FQAN",'
+        '"IPVersion":4,'
+        '"IPCount":10,'
+        '"MeasurementTime":1685051946}',
+        '{"SiteName":"TEST-Site",'
+        f'"CloudType":"{cloud_type}",'
+        '"CloudComputeService":"Fake Cloud Service",'
+        '"uuid":"5c50720e-a653-4d70-9b0e-d4388687fcbc",'
+        '"LocalUser":"3391a44e-3728-478d-abde-b86c25356571",'
+        '"GlobalUserName":"User 2 DN",'
+        '"LocalGroup":"2dae43c4-1889-4e63-b172-d4e99381e30a",'
+        '"FQAN":"VO 2 FQAN",'
+        '"IPVersion":6,'
+        '"IPCount":20,'
+        '"MeasurementTime":1685051946}',
     ]
     return ssm_entries
 
@@ -590,9 +587,9 @@ def expected_message_ip() -> str:
         '"GlobalUserName": "User 1 DN", '
         '"LocalGroup": "03b6a6c4-cf2b-48b9-82f1-69c52b9f30af", '
         '"FQAN": "VO 1 FQAN", '
-        '"MeasurementTime": 1685051946, '
         '"IPVersion": 4, '
-        '"IPCount": 10}, '
+        '"IPCount": 10, '
+        '"MeasurementTime": 1685051946}, '
         '{"SiteName": "TEST-Site", '
         f'"CloudType": "{cloud_type}", '
         '"CloudComputeService": "Fake Cloud Service", '
@@ -601,9 +598,9 @@ def expected_message_ip() -> str:
         '"GlobalUserName": "User 2 DN", '
         '"LocalGroup": "2dae43c4-1889-4e63-b172-d4e99381e30a", '
         '"FQAN": "VO 2 FQAN", '
-        '"MeasurementTime": 1685051946, '
         '"IPVersion": 6, '
-        '"IPCount": 20}'
+        '"IPCount": 20, '
+        '"MeasurementTime": 1685051946}'
         "]}"
     )
     return message
'{"SiteName":"TEST-Site",' + f'"CloudType":"{cloud_type}",' + '"CloudComputeService":"Fake Cloud Service",' + '"AccUUID":"99cf5d02-a573-46a1-b90d-0f7327126876",' + '"GlobalUserName":"d4e547e6f298fe34389@foobar.eu",' + '"FQAN":"VO 1 FQAN",' + '"Count":30,' + '"AvailableDuration":5000,' + '"MeasurementMonth":2,' + '"MeasurementYear":2022,' + '"AssociatedRecordType":"cloud",' + '"Type":"GPU",' + '"Model":"Foobar A300",' + '"ActiveDuration":5000}', ] return ssm_entries @@ -661,7 +660,8 @@ def expected_message_accelerator() -> str: '"MeasurementYear": 2022, ' '"AssociatedRecordType": "cloud", ' '"Type": "GPU", ' - '"Model": "Foobar A200"}, ' + '"Model": "Foobar A200", ' + '"ActiveDuration": 5000}, ' '{"SiteName": "TEST-Site", ' f'"CloudType": "{cloud_type}", ' '"CloudComputeService": "Fake Cloud Service", ' @@ -674,7 +674,8 @@ def expected_message_accelerator() -> str: '"MeasurementYear": 2022, ' '"AssociatedRecordType": "cloud", ' '"Type": "GPU", ' - '"Model": "Foobar A300"}' + '"Model": "Foobar A300", ' + '"ActiveDuration": 5000}' "]}" ) return message diff --git a/caso/tests/test_record.py b/caso/tests/test_record.py index f07d386..bf605da 100644 --- a/caso/tests/test_record.py +++ b/caso/tests/test_record.py @@ -19,10 +19,7 @@ import datetime import json -import pytest - -@pytest.mark.skip(reason="Pydantic 1 does not support computed fields") def test_cloud_record(cloud_record): """Test a cloud record is correctly generated.""" wall = datetime.timedelta(days=5).total_seconds() @@ -33,9 +30,10 @@ def test_cloud_record(cloud_record): assert isinstance(cloud_record.start_time, datetime.datetime) assert isinstance(cloud_record.end_time, datetime.datetime) + assert isinstance(cloud_record.start_time_epoch, int) + assert isinstance(cloud_record.end_time_epoch, int) -@pytest.mark.skip(reason="Pydantic 1 does not support computed fields") def test_cloud_record_map_opts(cloud_record, valid_cloud_record): """Test a cloud record is correctly rendered.""" opts = { @@ -43,7 +41,7 @@ def test_cloud_record_map_opts(cloud_record, valid_cloud_record): "exclude_none": True, } - assert json.loads(cloud_record.json(**opts)) == valid_cloud_record + assert json.loads(cloud_record.model_dump_json(**opts)) == valid_cloud_record def test_cloud_record_map_opts_custom_wall_cpu(cloud_record, valid_cloud_record): @@ -58,10 +56,9 @@ def test_cloud_record_map_opts_custom_wall_cpu(cloud_record, valid_cloud_record) "exclude_none": True, } - assert json.loads(cloud_record.json(**opts)) == valid_cloud_record + assert json.loads(cloud_record.model_dump_json(**opts)) == valid_cloud_record -@pytest.mark.skip(reason="Pydantic 1 does not support computed fields") def test_cloud_record_custom_wall(cloud_record): """Test a cloud record is correctly rendered with custom wall time.""" wall = 200 @@ -82,7 +79,6 @@ def test_cloud_record_custom_wall_cpu(cloud_record): assert cloud_record.cpu_duration == cpu -@pytest.mark.skip(reason="Pydantic 1 does not support computed fields") def test_cloud_record_custom_cpu(cloud_record): """Test a cloud record is correctly rendered with custom cpu time.""" wall = datetime.timedelta(days=5).total_seconds() @@ -104,7 +100,7 @@ def test_ip_record_map_opts(ip_record, valid_ip_record): "exclude_none": True, } - assert json.loads(ip_record.json(**opts)) == valid_ip_record + assert json.loads(ip_record.model_dump_json(**opts)) == valid_ip_record def test_accelerator_record(accelerator_record): @@ -118,7 +114,10 @@ def test_accelerator_record_map_opts(accelerator_record, valid_accelerator_recor "by_alias": 
diff --git a/caso/tests/test_record.py b/caso/tests/test_record.py
index f07d386..bf605da 100644
--- a/caso/tests/test_record.py
+++ b/caso/tests/test_record.py
@@ -19,10 +19,7 @@
 import datetime
 import json
 
-import pytest
-
 
-@pytest.mark.skip(reason="Pydantic 1 does not support computed fields")
 def test_cloud_record(cloud_record):
     """Test a cloud record is correctly generated."""
     wall = datetime.timedelta(days=5).total_seconds()
@@ -33,9 +30,10 @@ def test_cloud_record(cloud_record):
 
     assert isinstance(cloud_record.start_time, datetime.datetime)
     assert isinstance(cloud_record.end_time, datetime.datetime)
+    assert isinstance(cloud_record.start_time_epoch, int)
+    assert isinstance(cloud_record.end_time_epoch, int)
 
 
-@pytest.mark.skip(reason="Pydantic 1 does not support computed fields")
 def test_cloud_record_map_opts(cloud_record, valid_cloud_record):
     """Test a cloud record is correctly rendered."""
     opts = {
@@ -43,7 +41,7 @@ def test_cloud_record_map_opts(cloud_record, valid_cloud_record):
         "exclude_none": True,
     }
 
-    assert json.loads(cloud_record.json(**opts)) == valid_cloud_record
+    assert json.loads(cloud_record.model_dump_json(**opts)) == valid_cloud_record
 
 
 def test_cloud_record_map_opts_custom_wall_cpu(cloud_record, valid_cloud_record):
@@ -58,10 +56,9 @@ def test_cloud_record_map_opts_custom_wall_cpu(cloud_record, valid_cloud_record)
         "exclude_none": True,
     }
 
-    assert json.loads(cloud_record.json(**opts)) == valid_cloud_record
+    assert json.loads(cloud_record.model_dump_json(**opts)) == valid_cloud_record
 
 
-@pytest.mark.skip(reason="Pydantic 1 does not support computed fields")
 def test_cloud_record_custom_wall(cloud_record):
     """Test a cloud record is correctly rendered with custom wall time."""
     wall = 200
@@ -82,7 +79,6 @@ def test_cloud_record_custom_wall_cpu(cloud_record):
     assert cloud_record.cpu_duration == cpu
 
 
-@pytest.mark.skip(reason="Pydantic 1 does not support computed fields")
 def test_cloud_record_custom_cpu(cloud_record):
     """Test a cloud record is correctly rendered with custom cpu time."""
     wall = datetime.timedelta(days=5).total_seconds()
@@ -104,7 +100,7 @@ def test_ip_record_map_opts(ip_record, valid_ip_record):
         "exclude_none": True,
     }
 
-    assert json.loads(ip_record.json(**opts)) == valid_ip_record
+    assert json.loads(ip_record.model_dump_json(**opts)) == valid_ip_record
 
 
 def test_accelerator_record(accelerator_record):
@@ -118,7 +114,10 @@ def test_accelerator_record_map_opts(accelerator_record, valid_accelerator_recor
         "by_alias": True,
         "exclude_none": True,
     }
-    assert json.loads(accelerator_record.json(**opts)) == valid_accelerator_record
+    assert (
+        json.loads(accelerator_record.model_dump_json(**opts))
+        == valid_accelerator_record  # noqa
+    )
 
 
 def test_storage_record(storage_record):
@@ -132,4 +131,4 @@ def test_storage_record_map_opts(storage_record, valid_storage_record):
         "by_alias": True,
         "exclude_none": True,
     }
-    assert json.loads(storage_record.json(**opts)) == valid_storage_record
+    assert json.loads(storage_record.model_dump_json(**opts)) == valid_storage_record
diff --git a/caso/tests/test_ssm.py b/caso/tests/test_ssm.py
index 52e926f..6d622f2 100644
--- a/caso/tests/test_ssm.py
+++ b/caso/tests/test_ssm.py
@@ -176,6 +176,8 @@ def test_complete_accelerator_message(
     """Test a complete cloud message."""
 
     def mock_add(message):
+        print(message)
+        print(expected_message_accelerator)
         assert message == expected_message_accelerator
 
     with monkeypatch.context() as m:
diff --git a/requirements.txt b/requirements.txt
index 4667ee5..2e02f3c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -20,4 +20,4 @@ python-neutronclient>=6.7.0 # Apache-2.0
 keystoneauth1>=3.4.0 # Apache-2.0
 stevedore
 
-pydantic
+pydantic>=2.0.0