Decouple read size from write size in DDB model #86

Open · wants to merge 1 commit into base: main
13 changes: 9 additions & 4 deletions service_capacity_modeling/models/org/netflix/ddb.py
```diff
@@ -200,11 +200,16 @@ def _get_write_consistency_percentages(
     }


-def _mean_item_size_bytes(desires: CapacityDesires) -> float:
+def _mean_write_item_size_bytes(desires: CapacityDesires) -> float:
     mean_item_size = desires.query_pattern.estimated_mean_write_size_bytes.mid
     return mean_item_size


+def _mean_read_item_size_bytes(desires: CapacityDesires) -> float:
+    mean_item_size = desires.query_pattern.estimated_mean_read_size_bytes.mid
+    return mean_item_size
+
+
 def _get_dynamo_standard(context: RegionContext) -> Service:
     number_of_regions = context.num_regions
     dynamo_service = (
```
```diff
@@ -238,7 +243,7 @@ def _plan_writes(
     desires: CapacityDesires,
     extra_model_arguments: Dict[str, Any],
 ) -> _WritePlan:
-    mean_item_size = _mean_item_size_bytes(desires)
+    mean_item_size = _mean_write_item_size_bytes(desires)

     # For items up to 1 KB in size,
     # one WCU can perform one standard write request per second
```
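For context on the hunk above, here is a minimal sketch of the WCU arithmetic applied to the mean write item size; the helper name is illustrative, not part of the repo:

```python
import math


def wcus_per_write(mean_write_item_size_bytes: float) -> int:
    # A standard write of an item up to 1 KB consumes one WCU; larger items
    # consume an additional WCU per extra 1 KB, rounded up.
    return math.ceil(max(1.0, mean_write_item_size_bytes / 1024))


# The 5798-byte items used in the tests cost ceil(5798 / 1024) = 6 WCUs per write.
```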
```diff
@@ -305,7 +310,7 @@ def _plan_reads(
     transactional_read_percent = read_percentages["transactional_read_percent"]
     eventual_read_percent = read_percentages["eventual_read_percent"]
     strong_read_percent = read_percentages["strong_read_percent"]
-    mean_item_size = _mean_item_size_bytes(desires)
+    mean_item_size = _mean_read_item_size_bytes(desires)

     # items up to 4 KB in size
     rounded_rcus_per_item = math.ceil(max(1.0, mean_item_size / (4 * 1024)))
```
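The read path uses a 4 KB unit rather than 1 KB. A hedged sketch of the rounding above, and of why decoupling the read size matters (helper name is illustrative):

```python
import math


def read_units_per_read(mean_read_item_size_bytes: float) -> int:
    # A read of an item up to 4 KB consumes one 4 KB read unit; larger items
    # round up per additional 4 KB, with a floor of one unit.
    return math.ceil(max(1.0, mean_read_item_size_bytes / (4 * 1024)))


# Before this change both paths shared the write size: a workload writing
# 5798-byte items but reading back ~1 KB responses was charged
# ceil(5798 / 4096) = 2 read units per read; with the read size decoupled,
# the 1 KB reads hit the one-unit floor instead.
```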
```diff
@@ -377,7 +382,7 @@ def _plan_data_transfer(
         return _DataTransferPlan(
             total_data_transfer_gib=0, total_annual_data_transfer_cost=0
         )
-    mean_item_size_bytes = _mean_item_size_bytes(desires)
+    mean_item_size_bytes = _mean_write_item_size_bytes(desires)
     writes_per_second = desires.query_pattern.estimated_write_per_second.mid
     # 31,536,000 seconds in a year (365 * 24 * 60 * 60)
     # 1024 * 1024 * 1024 = 1Gib
```
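The data-transfer plan is driven by write traffic (it multiplies by writes per second), so it keeps the write item size rather than the read size. A sketch of the annual-volume arithmetic behind the comments above, with illustrative names:

```python
SECONDS_PER_YEAR = 365 * 24 * 60 * 60   # 31,536,000
BYTES_PER_GIB = 1024 * 1024 * 1024


def annual_transfer_gib(mean_write_item_size_bytes: float, writes_per_second: float) -> float:
    # Annual transferred volume = bytes written per second * seconds per year,
    # converted to GiB.
    return mean_write_item_size_bytes * writes_per_second * SECONDS_PER_YEAR / BYTES_PER_GIB


# e.g. 5798-byte items at 1,000 writes/s is roughly 170,000 GiB per year.
```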
2 changes: 2 additions & 0 deletions tests/netflix/test_ddb.py
```diff
@@ -53,6 +53,7 @@
         ),
         estimated_write_per_second=certain_int(0),
         estimated_mean_write_size_bytes=certain_int(5798),
+        estimated_mean_read_size_bytes=certain_int(5798),
     ),
     data_shape=DataShape(
         estimated_state_size_gib=Interval(low=10, mid=100, high=1000, confidence=0.98),
```
```diff
@@ -78,6 +79,7 @@
             low=100, mid=1000, high=10000, confidence=0.98
         ),
         estimated_mean_write_size_bytes=certain_int(5798),
+        estimated_mean_read_size_bytes=certain_int(5798),
     ),
     data_shape=DataShape(
         estimated_state_size_gib=Interval(low=10, mid=100, high=1000, confidence=0.98),
```
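With the new field wired through, callers can describe asymmetric workloads. A hypothetical example based on the test fixtures above; the import path and constructor defaults are assumptions, not taken from this diff:

```python
from service_capacity_modeling.interface import CapacityDesires, QueryPattern, certain_int

# A workload that writes large items but reads back small projections.
desires = CapacityDesires(
    service_tier=1,
    query_pattern=QueryPattern(
        estimated_read_per_second=certain_int(10_000),
        estimated_write_per_second=certain_int(1_000),
        estimated_mean_write_size_bytes=certain_int(5798),
        estimated_mean_read_size_bytes=certain_int(1024),
    ),
)
```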