Skip to content

Commit

Permalink
Ensure "turned off" engines are still considered during planning (#201)
Browse files Browse the repository at this point in the history
* Special case base provisioning with zero nodes

* Minor fix
  • Loading branch information
geoffxy authored Jul 19, 2023
1 parent f0bfe15 commit 43a0c3c
Show file tree
Hide file tree
Showing 7 changed files with 72 additions and 17 deletions.
2 changes: 2 additions & 0 deletions src/brad/planner/beam/query_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,7 @@ async def run_replan(self) -> None:
aurora_enumerator.scaling_to_distance(
ctx.current_blueprint.aurora_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Aurora,
),
)
for aurora in aurora_it:
Expand All @@ -239,6 +240,7 @@ async def run_replan(self) -> None:
redshift_enumerator.scaling_to_distance(
ctx.current_blueprint.redshift_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Redshift,
),
)
for redshift in redshift_it:
Expand Down
14 changes: 8 additions & 6 deletions src/brad/planner/beam/query_based_candidate.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,6 +433,7 @@ def find_best_provisioning(self, ctx: ScoringContext) -> None:
aurora_enumerator.scaling_to_distance(
ctx.current_blueprint.aurora_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Aurora,
),
)

Expand All @@ -448,6 +449,7 @@ def find_best_provisioning(self, ctx: ScoringContext) -> None:
redshift_enumerator.scaling_to_distance(
ctx.current_blueprint.redshift_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Redshift,
),
)

Expand Down Expand Up @@ -531,12 +533,12 @@ def compute_runtime_feasibility(self, ctx: ScoringContext) -> None:
self.aurora_score.overall_cpu_denorm
>= self.aurora_score.pred_txn_peak_cpu_denorm
):
logger.debug(
"Txn not feasible. %s, pred denorm %.2f, peak denorm %.2f",
self.aurora_provisioning,
self.aurora_score.overall_cpu_denorm,
self.aurora_score.pred_txn_peak_cpu_denorm,
)
# logger.debug(
# "Txn not feasible. %s, pred denorm %.2f, peak denorm %.2f",
# self.aurora_provisioning,
# self.aurora_score.overall_cpu_denorm,
# self.aurora_score.pred_txn_peak_cpu_denorm,
# )
self.feasibility = BlueprintFeasibility.Infeasible
return

Expand Down
2 changes: 2 additions & 0 deletions src/brad/planner/beam/table_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,6 +211,7 @@ async def run_replan(self) -> None:
aurora_enumerator.scaling_to_distance(
ctx.current_blueprint.aurora_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Aurora,
),
)
for aurora in aurora_it:
Expand All @@ -219,6 +220,7 @@ async def run_replan(self) -> None:
redshift_enumerator.scaling_to_distance(
ctx.current_blueprint.redshift_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Redshift,
),
)
for redshift in redshift_it:
Expand Down
2 changes: 2 additions & 0 deletions src/brad/planner/beam/table_based_candidate.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,6 +416,7 @@ def find_best_provisioning(self, ctx: ScoringContext) -> None:
aurora_enumerator.scaling_to_distance(
ctx.current_blueprint.aurora_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Aurora,
),
)

Expand All @@ -431,6 +432,7 @@ def find_best_provisioning(self, ctx: ScoringContext) -> None:
redshift_enumerator.scaling_to_distance(
ctx.current_blueprint.redshift_provisioning(),
ctx.planner_config.max_provisioning_multiplier(),
Engine.Redshift,
),
)

Expand Down
8 changes: 6 additions & 2 deletions src/brad/planner/enumeration/neighborhood.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,14 @@ def enumerate(
redshift_enum = ProvisioningEnumerator(Engine.Redshift)

max_aurora_dist = aurora_enum.scaling_to_distance(
base_blueprint.aurora_provisioning(), max_provisioning_multiplier
base_blueprint.aurora_provisioning(),
max_provisioning_multiplier,
Engine.Aurora,
)
max_redshift_dist = redshift_enum.scaling_to_distance(
base_blueprint.redshift_provisioning(), max_provisioning_multiplier
base_blueprint.redshift_provisioning(),
max_provisioning_multiplier,
Engine.Redshift,
)

for aurora_prov in aurora_enum.enumerate_nearby(
Expand Down
56 changes: 49 additions & 7 deletions src/brad/planner/enumeration/provisioning.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import json
import math

from typing import Iterator, Dict
from typing import Iterator, Dict, Tuple, Optional

from brad.blueprint.provisioning import Provisioning
from brad.config.engine import Engine
Expand Down Expand Up @@ -86,17 +86,36 @@ def enumerate_nearby(
# part of the planner transition score.

def scaling_to_distance(
    self,
    base_provisioning: Provisioning,
    max_scaling_multiplier: float,
    engine: Engine,
) -> float:
    """
    Helps with selecting a maximum distance value based on intuitive scaling
    limits on the existing provisioning.

    The distance returned is `resource_value(base) * max_scaling_multiplier`,
    where the base resource value is the instance type's "resource_value"
    scaled by the node count.
    """
    # Athena is serverless and has no provisioning to scale.
    assert engine != Engine.Athena

    if base_provisioning.num_nodes() == 0:
        # Special case. Instead of using the current provisioning (which has
        # 0 nodes, i.e., the engine is off), we use the "smallest" possible
        # provisioning instead. Otherwise the distance would be zero and a
        # turned-off engine could never be considered during planning.
        if engine == Engine.Aurora:
            base_value = float(_MIN_AURORA[1]["resource_value"])
        elif engine == Engine.Redshift:
            base_value = float(_MIN_REDSHIFT[1]["resource_value"])
        else:
            raise RuntimeError("Unsupported engine {}".format(engine))
    else:
        base_value = (
            float(
                self._instances[base_provisioning.instance_type()]["resource_value"]
            )
            * base_provisioning.num_nodes()
        )

    return base_value * max_scaling_multiplier

def _compute_distance(self, source_value: float, dest: Provisioning) -> float:
return self._compute_resource_value(dest) - source_value
Expand Down Expand Up @@ -126,5 +145,28 @@ def _load_instance_resources(file_name: str) -> Dict[str, Dict[str, int | float]
return instances_map


def _find_min_instance(
instances: Dict[str, Dict[str, int | float]]
) -> Tuple[str, Dict[str, int | float]]:
min_type: Optional[str] = None
min_resources: Optional[Dict[str, int | float]] = None

for instance_type, resources in instances.items():
if (
min_resources is None
# pylint: disable-next=unsubscriptable-object
or resources["resource_value"] < min_resources["resource_value"]
):
min_type = instance_type
min_resources = resources

assert min_type is not None
assert min_resources is not None
return min_type, min_resources


_REDSHIFT_INSTANCES = _load_instance_resources("redshift_instances.json")
_AURORA_INSTANCES = _load_instance_resources("aurora_postgresql_instances.json")

_MIN_REDSHIFT = _find_min_instance(_REDSHIFT_INSTANCES)
_MIN_AURORA = _find_min_instance(_AURORA_INSTANCES)
5 changes: 3 additions & 2 deletions tests/test_enumeration.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def test_provisioning_enumerate_aurora():
aurora_nearby = [
p.clone()
for p in aurora.enumerate_nearby(
base_aurora, aurora.scaling_to_distance(base_aurora, 2)
base_aurora, aurora.scaling_to_distance(base_aurora, 2, Engine.Aurora)
)
]

Expand All @@ -28,7 +28,8 @@ def test_provisioning_enumerate_redshift():
redshift_nearby = [
p.clone()
for p in redshift.enumerate_nearby(
base_redshift, redshift.scaling_to_distance(base_redshift, 2)
base_redshift,
redshift.scaling_to_distance(base_redshift, 2, Engine.Redshift),
)
]

Expand Down

0 comments on commit 43a0c3c

Please sign in to comment.