From 894971b3d02a5b56e1e70416cf4ee18ee2e76278 Mon Sep 17 00:00:00 2001 From: Michael Bauer Date: Wed, 29 Jan 2025 01:09:20 +0000 Subject: [PATCH 1/5] foo --- systems/drone/Dockerfile | 2 +- systems/drone/build_and_push.sh | 2 +- systems/drone/metrics/batch_metrics.py | 70 ++++++++++--- systems/drone/metrics/metrics.py | 139 ++++++++++++++++--------- 4 files changed, 150 insertions(+), 63 deletions(-) diff --git a/systems/drone/Dockerfile b/systems/drone/Dockerfile index 42cd636..ffb6504 100644 --- a/systems/drone/Dockerfile +++ b/systems/drone/Dockerfile @@ -1,4 +1,4 @@ -FROM 909785973729.dkr.ecr.us-east-1.amazonaws.com/drone-simulator:0de4571c4816b18d3f2d917a3be84d7d3b7fb182 +FROM 909785973729.dkr.ecr.us-east-1.amazonaws.com/drone-simulator:swarm-seed ARG VELOCITY_COST_OVERRIDE ENV VELOCITY_COST_OVERRIDE=$VELOCITY_COST_OVERRIDE diff --git a/systems/drone/build_and_push.sh b/systems/drone/build_and_push.sh index 5680a6a..2464aff 100755 --- a/systems/drone/build_and_push.sh +++ b/systems/drone/build_and_push.sh @@ -60,7 +60,7 @@ echo "Building the metrics build image..." echo "Performing ECR Login..." -aws ecr get-login-password --region "${RESIM_SANDBOX_ECR_REGION}" \ +aws ecr --profile infrastructure get-login-password --region "${RESIM_SANDBOX_ECR_REGION}" \ | docker login --username AWS --password-stdin "${RESIM_SANDBOX_ECR}" echo "Pushing build image..." diff --git a/systems/drone/metrics/batch_metrics.py b/systems/drone/metrics/batch_metrics.py index 1c63434..5d2c888 100644 --- a/systems/drone/metrics/batch_metrics.py +++ b/systems/drone/metrics/batch_metrics.py @@ -60,9 +60,7 @@ def boundary(i: int): frac = i / bins return lower_bound * (1 - frac) + upper_bound * frac - return [ - HistogramBucket(lower=boundary(i), upper=boundary(i + 1)) for i in range(bins) - ] + return [HistogramBucket(lower=boundary(i), upper=boundary(i + 1)) for i in range(bins)] async def compute_batch_metrics( @@ -74,9 +72,7 @@ async def compute_batch_metrics( ################################################################################ client = AuthenticatedClient(base_url=api_url, token=token) - batch = await get_batch.asyncio( - project_id=project_id, batch_id=batch_id, client=client - ) + batch = await get_batch.asyncio(project_id=project_id, batch_id=batch_id, client=client) job_pages = await async_fetch_all_pages( list_jobs.asyncio, @@ -157,16 +153,14 @@ async def compute_batch_metrics( job_status_counts_data = GroupedMetricsData( name="job_status_counts_data", category_to_series={ - status: np.array([count]) - for (status, count) in zip(barnames.series, count.series) + status: np.array([count]) for (status, count) in zip(barnames.series, count.series) }, ) job_status_counts_status = GroupedMetricsData( name="job_status_counts_status", category_to_series={ - status: np.array([MetricStatus.PASSED_METRIC_STATUS]) - for status in barnames.series + status: np.array([MetricStatus.PASSED_METRIC_STATUS]) for status in barnames.series }, ) @@ -270,9 +264,7 @@ async def compute_batch_metrics( ################################################################################ # SPEEDS HISTOGRAM # - allspeeds = SeriesMetricsData( - name="all_speeds", series=np.array(speeds_df["speed (m/s)"]) - ) + allspeeds = SeriesMetricsData(name="all_speeds", series=np.array(speeds_df["speed (m/s)"])) allspeeds_statuses = SeriesMetricsData( name="all_speeds_statuses", series=np.array([MetricStatus.PASSED_METRIC_STATUS] * len(allspeeds.series)), @@ -309,6 +301,58 @@ async def compute_batch_metrics( .with_unit("m/s") ) + 
################################################################################
+    # DISTANCES HISTOGRAM
+    separations_df = pd.DataFrame(
+        [
+            [time, separation, test_names[job_id]]
+            for job_id in metrics_data
+            for (time, separation) in zip(
+                metrics_data_by_name[job_id]["separation_times"].series,
+                metrics_data_by_name[job_id]["separations"].series,
+            )
+        ],
+        columns=["time (s)", "separation (m)", "test"],
+    )
+    allseparations = SeriesMetricsData(
+        name="all_separations", series=np.array(separations_df["separation (m)"])
+    )
+    allseparations_statuses = SeriesMetricsData(
+        name="all_separations_statuses",
+        series=np.array([MetricStatus.PASSED_METRIC_STATUS] * len(allseparations.series)),
+    )
+
+    buckets = compute_buckets(allseparations)
+
+    (
+        metrics_writer.add_histogram_metric(name="Drone Separation Distribution")
+        .with_description("Drone separation distribution across the batch")
+        .with_status(MetricStatus.PASSED_METRIC_STATUS)
+        .with_importance(MetricImportance.ZERO_IMPORTANCE)
+        .with_should_display(True)
+        .with_blocking(False)
+        .with_values_data(allseparations)
+        .with_statuses_data(allseparations_statuses)
+        .with_buckets(buckets)
+        .with_lower_bound(buckets[0].lower)
+        .with_upper_bound(buckets[-1].upper)
+        .with_x_axis_name("separation (m)")
+    )
+
+    ################################################################################
+    # SEPARATIONS MEAN
+    #
+    (
+        metrics_writer.add_scalar_metric("Batch Mean Ego Separation")
+        .with_description("Mean separation over the batch for longitudinal reporting")
+        .with_status(MetricStatus.PASSED_METRIC_STATUS)
+        .with_importance(MetricImportance.ZERO_IMPORTANCE)
+        .with_should_display(False)  # Don't display. Only want for reports.
+        .with_blocking(False)
+        .with_value(np.mean(allseparations.series))
+        .with_unit("m")
+    )
+
     write_proto(metrics_writer, "/tmp/resim/outputs/metrics.binproto")
diff --git a/systems/drone/metrics/metrics.py b/systems/drone/metrics/metrics.py
index b33e786..5186f44 100644
--- a/systems/drone/metrics/metrics.py
+++ b/systems/drone/metrics/metrics.py
@@ -4,6 +4,7 @@
 # license that can be found in the LICENSE file or at
 # https://opensource.org/licenses/MIT.
 
+import argparse import asyncio import mcap.reader import uuid @@ -53,9 +54,9 @@ _TOPICS = ["/actor_states"] -def load_experience(): +def load_experience(experience_path): """Load the experience proto from a file.""" - with open(EXPERIENCE_PATH, "rb") as fp: + with open(experience_path, "rb") as fp: experience = text_format.Parse(fp.read(), Experience()) return experience @@ -75,23 +76,17 @@ def mapped_point(index): wireframe_collection.set(segments=segments) -def load_log() -> list[dict[str, typing.Any]]: +def load_log(log_path) -> list[dict[str, typing.Any]]: """Load the log from an mcap file.""" messages = collections.defaultdict(list) - with open(LOG_PATH, "rb") as converted_log: - reader = mcap.reader.make_reader( - converted_log, decoder_factories=[DecoderFactory()] - ) - for _, channel, _, message_proto in reader.iter_decoded_messages( - topics=_TOPICS - ): + with open(log_path, "rb") as converted_log: + reader = mcap.reader.make_reader(converted_log, decoder_factories=[DecoderFactory()]) + for _, channel, _, message_proto in reader.iter_decoded_messages(topics=_TOPICS): messages[channel.topic].append(message_proto) return messages -def make_gif_metric( - writer, wireframe, poses: list[se3_python.SE3], times, goal -) -> None: +def make_gif_metric(writer, wireframe, poses: list[se3_python.SE3], times, goal, out_dir) -> None: """Make a couple of gif metrics of the drone moving around.""" trajectory = np.array([pose.translation() for pose in poses]) @@ -131,10 +126,8 @@ def animate(i: int): to_goal.set_data_3d(*([trajectory[i, j], goal[j]] for j in range(3))) ani = animation.FuncAnimation(fig, animate, num_steps) - pillow_writer = animation.PillowWriter( - fps=10, metadata=dict(artist="Me"), bitrate=1800 - ) - ani.save("/tmp/resim/outputs/pose.gif", writer=pillow_writer) + pillow_writer = animation.PillowWriter(fps=10, metadata=dict(artist="Me"), bitrate=1800) + ani.save(out_dir / "pose.gif", writer=pillow_writer) status = MetricStatus.PASSED_METRIC_STATUS @@ -170,9 +163,7 @@ def animate_map(i: int): marker.set_data([trajectory[i, 0]], [trajectory[i, 1]]) ani = animation.FuncAnimation(fig, animate_map, num_steps) - pillow_writer = animation.PillowWriter( - fps=10, metadata=dict(artist="Me"), bitrate=1800 - ) + pillow_writer = animation.PillowWriter(fps=10, metadata=dict(artist="Me"), bitrate=1800) ani.save("/tmp/resim/outputs/top_down.gif", writer=pillow_writer) data = ExternalFileMetricsData(name="top_down_gif_data", filename="top_down.gif") @@ -188,7 +179,51 @@ def animate_map(i: int): ) -def ego_metrics(writer, log): +def average_distance_metric(writer, log): + states_over_time = log["/actor_states"] + + pose_bundles = [] + times = [] + + mean_distances = [] + + # TODO maybe produce a 2d plot + + def time_to_s(t): + return t.seconds + 1e-9 * t.nanos + + for bundle in log["/actor_states"]: + distances = [] + + poses = [se3_python.SE3.exp(s.state.ref_from_frame.algebra) for s in bundle.states] + for ii in range(len(poses)): + for jj in range(ii + 1, len(poses)): + distances.append(np.linalg.norm(poses[ii].translation() - poses[jj].translation())) + pose_bundles.append(poses) + + mean_distances.append(np.mean(distances)) + times.append(time_to_s(bundle.states[0].time_of_validity)) + + failure_def = DoubleFailureDefinition(fails_below=0.1, fails_above=None) + ( + writer.add_scalar_metric("Mean Distance Between Drones") + .with_failure_definition(failure_def) + .with_value(np.mean(mean_distances)) + .with_description("Mean distance between swarm members during sim") + 
.with_blocking(False) + .with_should_display(True) + .with_importance(MetricImportance.ZERO_IMPORTANCE) + .with_status(MetricStatus.PASSED_METRIC_STATUS) + .with_unit("m") + ) + separations = SeriesMetricsData("separations", series=np.array(mean_distances), unit="m") + times = SeriesMetricsData("separation_times", series=np.array(times), unit="s") + + writer.add_metrics_data(separations) + writer.add_metrics_data(times) + + +def ego_metrics(writer, experience, log, out_dir): """Compute the job metrics for the ego.""" ################################################################################ # EXTRACT USEFUL INFO FROM LOG + EXPERIENCE @@ -201,18 +236,13 @@ def ego_metrics(writer, log): if state.is_spawned: id_to_states[state.id.data].append(state) - experience = load_experience() ego_actors = [ - a - for a in experience.dynamic_behavior.actors - if a.actor_type == Actor.SYSTEM_UNDER_TEST + a for a in experience.dynamic_behavior.actors if a.actor_type == Actor.SYSTEM_UNDER_TEST ] ego_actor = ego_actors[0] ego_id = ego_actor.id.data ego_geometry = [ - g - for g in experience.geometries - if g.id.data == ego_actor.geometries[0].geometry_id.data + g for g in experience.geometries if g.id.data == ego_actor.geometries[0].geometry_id.data ][0] ego_states = id_to_states[ego_id] @@ -230,7 +260,7 @@ def time_to_s(t): ][0] ego_goal = np.array(ego_movement_model.ilqr_drone.goal_position) - make_gif_metric(writer, ego_geometry, poses, times, ego_goal) + # make_gif_metric(writer, ego_geometry, poses, times, ego_goal, out_dir) ego_states = ego_states[0::10] @@ -267,8 +297,7 @@ def time_to_s(t): # SPEED OVER TIME PLOT # velocities = [ - p.rotation() * np.array(s.state.d_ref_from_frame[3:]) - for p, s in zip(poses, ego_states) + p.rotation() * np.array(s.state.d_ref_from_frame[3:]) for p, s in zip(poses, ego_states) ] speeds = np.array([np.linalg.norm(v) for v in velocities]) @@ -335,9 +364,7 @@ def status_from_speed(speed): category_to_series={ "ego": np.array( [ - Timestamp( - secs=s.time_of_validity.seconds, nanos=s.time_of_validity.nanos - ) + Timestamp(secs=s.time_of_validity.seconds, nanos=s.time_of_validity.nanos) for s in ego_states ] ) @@ -345,16 +372,12 @@ def status_from_speed(speed): ) speed_states = GroupedMetricsData( "speed_states", - category_to_series={ - "ego": np.array([status_from_speed(s).name for s in speeds.series]) - }, + category_to_series={"ego": np.array([status_from_speed(s).name for s in speeds.series])}, index_data=timestamps, ) speed_states_status = GroupedMetricsData( "speed_states_status", - category_to_series={ - "ego": np.array([status_from_speed(s) for s in speeds.series]) - }, + category_to_series={"ego": np.array([status_from_speed(s) for s in speeds.series])}, index_data=timestamps, ) @@ -487,21 +510,19 @@ def pose_from_state(state): break -def write_proto(writer): +def write_proto(writer, metrics_path): """Write out the binproto for our metrics""" metrics_proto = writer.write() validate_job_metrics(metrics_proto.metrics_msg) # Known location where the runner looks for metrics - with open(METRICS_PATH, "wb") as f: + with open(metrics_path, "wb") as f: f.write(metrics_proto.metrics_msg.SerializeToString()) async def maybe_batch_metrics(): """Run batch metrics if the config is present.""" if BATCH_METRICS_CONFIG_PATH.is_file(): - with open( - BATCH_METRICS_CONFIG_PATH, "r", encoding="utf-8" - ) as metrics_config_file: + with open(BATCH_METRICS_CONFIG_PATH, "r", encoding="utf-8") as metrics_config_file: metrics_config = json.load(metrics_config_file) await 
compute_batch_metrics( token=metrics_config["authToken"], @@ -515,13 +536,35 @@ async def maybe_batch_metrics(): async def main(): + parser = argparse.ArgumentParser(description="Parse file paths with kebab-style flags.") + parser.add_argument( + "--log-path", + default="/tmp/resim/inputs/logs/resim_log.mcap", + help="Path to the log file (default: /tmp/resim/inputs/logs/resim_log.mcap)", + ) + parser.add_argument( + "--experience-path", + default="/tmp/resim/inputs/experience/experience.sim", + help="Path to the experience file (default: /tmp/resim/inputs/experience/experience.sim)", + ) + parser.add_argument( + "--metrics-path", + default="/tmp/resim/outputs/metrics.binproto", + help="Path to the metrics file (default: /tmp/resim/outputs/metrics.binproto)", + ) + args = parser.parse_args() + await maybe_batch_metrics() - log = load_log() + log = load_log(args.log_path) + experience = load_experience(args.experience_path) metrics_writer = ResimMetricsWriter(uuid.uuid4()) # Make metrics writer! - ego_metrics(metrics_writer, log) - write_proto(metrics_writer) + + out_dir = Path(args.metrics_path).parent + ego_metrics(metrics_writer, experience, log, out_dir) + average_distance_metric(metrics_writer, log) + write_proto(metrics_writer, args.metrics_path) if __name__ == "__main__": From c24ecdf3365cf3780bcacf41e2d195c7b2e31062 Mon Sep 17 00:00:00 2001 From: Michael Bauer Date: Wed, 29 Jan 2025 01:20:23 +0000 Subject: [PATCH 2/5] tag --- systems/drone/metrics/batch_metrics.py | 1 + 1 file changed, 1 insertion(+) diff --git a/systems/drone/metrics/batch_metrics.py b/systems/drone/metrics/batch_metrics.py index 5d2c888..e8d6b0a 100644 --- a/systems/drone/metrics/batch_metrics.py +++ b/systems/drone/metrics/batch_metrics.py @@ -351,6 +351,7 @@ async def compute_batch_metrics( .with_blocking(False) .with_value(np.mean(allseparations.series)) .with_unit("m") + .with_tag(key="RESIM_SUMMARY", value="1") ) write_proto(metrics_writer, "/tmp/resim/outputs/metrics.binproto") From a3c12a1d18423ef1ba1a9eb9d87d0b009c85efff Mon Sep 17 00:00:00 2001 From: Michael Bauer Date: Fri, 31 Jan 2025 20:02:07 +0000 Subject: [PATCH 3/5] Add create build script with config file --- builds.yaml | 43 ++++++ requirements.txt | 6 + requirements_lock.txt | 103 +++++++++++++ scripts/create_builds.py | 313 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 465 insertions(+) create mode 100644 builds.yaml create mode 100644 requirements.txt create mode 100644 requirements_lock.txt create mode 100755 scripts/create_builds.py diff --git a/builds.yaml b/builds.yaml new file mode 100644 index 0000000..7174b4c --- /dev/null +++ b/builds.yaml @@ -0,0 +1,43 @@ +--- +project: "Lain's Playground: Sandbox V2" + + +registries: + resim-infra: + account_id: "909785973729" + region: "us-east-1" + auth: + profile: "infrastructure" + +resim_app_config: + client_id: "gTp1Y0kOyQ7QzIo2lZm0auGM6FJZZVvy" + auth_url: "https://resim.us.auth0.com/" + api_url: "https://api.resim.ai/v1/" + +experience_build_configs: + drone_experience: + description: "A drone experience build" + repo: + name: "drone-simulator" + registry: "resim-infra" + version_tag_prefix: "drone_sim_" + system: "Drone Motion Planning System" + branch: auto + version: auto + + build_command: + path: "systems/drone/" + +metrics_build_configs: + drone_metrics: + name: "A drone metrics build" + repo: + name: "drone-simulator" + registry: "resim-infra" + version_tag_prefix: "drone_sim_metrics_" + systems: + - "Drone Motion Planning System" + version: auto + + build_command: + path: 
"systems/drone/metrics/" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..7104a91 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,6 @@ +docker>=7.1.0 +resim-open-core>=0.6.1 +PyYAML>=6.0.2 +boto3>=1.36.10 +GitPython>=3.1.44 +pydantic>=2.10.6 diff --git a/requirements_lock.txt b/requirements_lock.txt new file mode 100644 index 0000000..5a44325 --- /dev/null +++ b/requirements_lock.txt @@ -0,0 +1,103 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --output-file=requirements_lock.txt requirements.txt +# +annotated-types==0.7.0 + # via pydantic +anyio==4.8.0 + # via httpx +attrs==25.1.0 + # via resim-open-core +boto3==1.36.10 + # via -r requirements.txt +botocore==1.36.10 + # via + # boto3 + # s3transfer +certifi==2024.12.14 + # via + # httpcore + # httpx + # requests +charset-normalizer==3.4.1 + # via requests +docker==7.1.0 + # via -r requirements.txt +exceptiongroup==1.2.2 + # via anyio +gitdb==4.0.12 + # via gitpython +gitpython==3.1.44 + # via -r requirements.txt +h11==0.14.0 + # via httpcore +httpcore==1.0.7 + # via httpx +httpx==0.28.1 + # via resim-open-core +idna==3.10 + # via + # anyio + # httpx + # requests +jmespath==1.0.1 + # via + # boto3 + # botocore +narwhals==1.24.1 + # via plotly +numpy==2.2.2 + # via + # pandas + # resim-open-core +packaging==24.2 + # via plotly +pandas==2.2.3 + # via resim-open-core +plotly==6.0.0 + # via resim-open-core +polling2==0.5.0 + # via resim-open-core +protobuf==5.29.3 + # via resim-open-core +pydantic==2.10.6 + # via -r requirements.txt +pydantic-core==2.27.2 + # via pydantic +python-dateutil==2.9.0.post0 + # via + # botocore + # pandas + # resim-open-core +pytz==2024.2 + # via pandas +pyyaml==6.0.2 + # via -r requirements.txt +requests==2.32.3 + # via + # docker + # resim-open-core +resim-open-core==0.6.1 + # via -r requirements.txt +s3transfer==0.11.2 + # via boto3 +six==1.17.0 + # via python-dateutil +smmap==5.0.2 + # via gitdb +sniffio==1.3.1 + # via anyio +typing-extensions==4.12.2 + # via + # anyio + # pydantic + # pydantic-core +tzdata==2025.1 + # via pandas +urllib3==2.3.0 + # via + # botocore + # docker + # requests diff --git a/scripts/create_builds.py b/scripts/create_builds.py new file mode 100755 index 0000000..4d1a2ec --- /dev/null +++ b/scripts/create_builds.py @@ -0,0 +1,313 @@ +#!/bin/python + +import argparse +import base64 +import logging +from http import HTTPStatus + +import boto3 +import docker +import git +import yaml +from docker.client import DockerClient +from pydantic import BaseModel +from resim.auth.python.device_code_client import DeviceCodeClient +from resim.metrics.fetch_all_pages import fetch_all_pages +from resim_python_client.api.builds import create_build_for_branch +from resim_python_client.api.metrics_builds import create_metrics_build +from resim_python_client.api.projects import ( + create_branch_for_project, + list_branches_for_project, + list_projects, +) +from resim_python_client.api.systems import add_system_to_metrics_build, list_systems +from resim_python_client.client import AuthenticatedClient +from resim_python_client.models import ( + BranchType, + CreateBranchInput, + CreateBuildForBranchInput, + CreateMetricsBuildInput, +) + +logger = logging.getLogger("create_builds") +logger.setLevel(logging.INFO) + + +class ResimAppConfig(BaseModel): + client_id: str + auth_url: str + api_url: str + + +class RegistryAuth(BaseModel): + profile: str + + +class ImageRegistry(BaseModel): + account_id: str + 
region: str + auth: RegistryAuth + + +class ImageRepo(BaseModel): + name: str + registry: str + + +class BuildCommand(BaseModel): + path: str + + +class ExperienceBuild(BaseModel): + description: str + repo: ImageRepo + version_tag_prefix: str + system: str + branch: str + version: str + build_command: BuildCommand + + +class MetricsBuild(BaseModel): + name: str + repo: ImageRepo + version_tag_prefix: str + systems: list[str] + version: str + build_command: BuildCommand + + +class Builds(BaseModel): + project: str + registries: dict[str, ImageRegistry] + resim_app_config: ResimAppConfig + experience_build_configs: dict[str, ExperienceBuild] + metrics_build_configs: dict[str, MetricsBuild] + + +def get_version(version: str) -> str: + if version == "auto": + repo = git.Repo(search_parent_directories=True) + return repo.head.object.hexsha + return version + + +def get_branch(branch: str) -> str: + if branch == "auto": + repo = git.Repo(search_parent_directories=True) + return repo.active_branch.name + return branch + + +def docker_ecr_auth(client: DockerClient, registry: ImageRegistry): + session = boto3.Session(profile_name=registry.auth.profile) + ecr_client = session.client("ecr", region_name=registry.region) + token = ecr_client.get_authorization_token() + password = ( + base64.b64decode(token["authorizationData"][0]["authorizationToken"]) + .decode() + .split(":")[1] + ) + registry_url = f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com" + client.login(username="AWS", password=password, registry=registry_url) + logger.info("Successfully authenticated to %s.", registry_url) + + +def get_client(config: ResimAppConfig) -> AuthenticatedClient: + auth_client = DeviceCodeClient(domain=config.auth_url, client_id=config.client_id) + token = auth_client.get_jwt()["access_token"] + client = AuthenticatedClient(base_url=config.api_url, token=token) + + return client + + +def get_project(project: str, client: AuthenticatedClient) -> str: + project_pages = fetch_all_pages(list_projects.sync, client=client) + projects = {p.name: p for page in project_pages for p in page.projects} + return projects[project] + + +def open_config() -> dict: + with open("builds.yaml", "r", encoding="utf-8") as f: + return yaml.load(f, Loader=yaml.SafeLoader) + + +def list_command(builds, args): + logger.info("Experience Builds:") + for build in builds.experience_build_configs: + logger.info(" %s", build) + + logger.info("Metrics Builds:") + for build in builds.metrics_build_configs: + logger.info(" %s", build) + + +def get_systems(client: AuthenticatedClient, project_id: str) -> dict[str, str]: + system_pages = fetch_all_pages( + list_systems.sync, client=client, project_id=project_id + ) + return {p.name: p.system_id for page in system_pages for p in page.systems} + + +def get_branches(client: AuthenticatedClient, project_id: str) -> dict[str, str]: + branch_pages = fetch_all_pages( + list_branches_for_project.sync, client=client, project_id=project_id + ) + return {p.name: p.branch_id for page in branch_pages for p in page.branches} + + +def make_branch(client: AuthenticatedClient, project_id: str, branch: str): + response = create_branch_for_project.sync( + project_id=project_id, + client=client, + body=CreateBranchInput(branch_type=BranchType.CHANGE_REQUEST, name=branch), + ) + assert response is not None, "Failed to make branch" + logger.info("Registered branch with id %s", response.branch_id) + + return response.branch_id + + +def register_experience_build( + client: AuthenticatedClient, + project_id: 
str, + build: ExperienceBuild, + uri: str, + systems: dict[str, str], + branches: dict[str, str], +): + branch = get_branch(build.branch) + version = get_version(build.version) + if branch not in branches: + branches[branch] = make_branch(client, project_id, branch) + + response = create_build_for_branch.sync( + project_id=project_id, + branch_id=branches[branch], + client=client, + body=CreateBuildForBranchInput( + image_uri=uri, + system_id=systems[build.system], + version=version, + description=build.description, + ), + ) + assert response is not None + logger.info("Registered experience build with id %s", response.build_id) + + +def register_metrics_build( + client: AuthenticatedClient, + project_id: str, + build: MetricsBuild, + uri: str, + systems: dict[str, str], +): + version = get_version(build.version) + response = create_metrics_build.sync( + project_id=project_id, + client=client, + body=CreateMetricsBuildInput( + image_uri=uri, + name=build.name, + version=version, + ), + ) + assert response is not None + logger.info("Registered metrics build with id %s", response.metrics_build_id) + metrics_build_id = response.metrics_build_id + + for system in build.systems: + response = add_system_to_metrics_build.sync_detailed( + project_id=project_id, + system_id=systems[system], + metrics_build_id=metrics_build_id, + client=client, + ) + assert ( + response.status_code == HTTPStatus.CREATED + ), "Failed to add metrics build to system" + logger.info("Added metrics build %s to %s system", metrics_build_id, system) + + +def build_push(builds, args, *, push: bool): + client = get_client(builds.resim_app_config) + project_id = get_project(builds.project, client).project_id + systems = get_systems(client, project_id) + branches = get_branches(client, project_id) + + docker_client = docker.from_env() + + combined_map = builds.experience_build_configs | builds.metrics_build_configs + for target in args.target_builds: + build = combined_map[target] + repo = build.repo + registry = builds.registries[repo.registry] + + docker_ecr_auth(docker_client, registry) + command_path = build.build_command.path + + full_repo_name = ( + f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com/{repo.name}" + ) + version = get_version(build.version) + tag = f"{build.version_tag_prefix}{version}" + uri = f"{full_repo_name}:{tag}" + + response = docker_client.api.build(path=command_path, tag=uri, decode=True) + for line in response: + logger.info(" ".join((str(v) for v in line.values()))) + + if not push: + continue + + response = docker_client.api.push( + repository=full_repo_name, tag=tag, stream=True, decode=True + ) + for line in response: + logger.info(" ".join((str(v) for v in line.values()))) + + if isinstance(build, ExperienceBuild): + register_experience_build(client, project_id, build, uri, systems, branches) + elif isinstance(build, MetricsBuild): + register_metrics_build(client, project_id, build, uri, systems) + else: + raise RuntimeError("Bad build type!") + + +def push_command(builds, args): + build_push(builds, args, push=True) + + +def build_command(builds, args): + build_push(builds, args, push=False) + + +def main(): + logging.basicConfig() + parser = argparse.ArgumentParser( + prog="create_builds", + description="A simple CLI for building, pushing, and registering builds.", + ) + subparsers = parser.add_subparsers(title="Commands", dest="command", required=True) + + list_parser = subparsers.add_parser("list", help="List all resources") + list_parser.set_defaults(func=list_command) + + # 
Build command + build_parser = subparsers.add_parser("build", help="Build a selected build") + build_parser.add_argument("target_builds", nargs="*") + build_parser.set_defaults(func=build_command) + + # Push command + push_parser = subparsers.add_parser("push", help="Push a selected build") + push_parser.add_argument("target_builds", nargs="*") + push_parser.set_defaults(func=push_command) + + args = parser.parse_args() + builds = Builds(**open_config()) + args.func(builds, args) # Call the appropriate function + + +if __name__ == "__main__": + main() From 46a18b6cf4d7b766809f75d498942e47f0a9fa56 Mon Sep 17 00:00:00 2001 From: Michael Bauer Date: Fri, 31 Jan 2025 20:04:55 +0000 Subject: [PATCH 4/5] remove system changes --- systems/drone/Dockerfile | 2 +- systems/drone/build_and_push.sh | 2 +- systems/drone/metrics/batch_metrics.py | 71 +++---------- systems/drone/metrics/metrics.py | 139 +++++++++---------------- 4 files changed, 63 insertions(+), 151 deletions(-) diff --git a/systems/drone/Dockerfile b/systems/drone/Dockerfile index ffb6504..42cd636 100644 --- a/systems/drone/Dockerfile +++ b/systems/drone/Dockerfile @@ -1,4 +1,4 @@ -FROM 909785973729.dkr.ecr.us-east-1.amazonaws.com/drone-simulator:swarm-seed +FROM 909785973729.dkr.ecr.us-east-1.amazonaws.com/drone-simulator:0de4571c4816b18d3f2d917a3be84d7d3b7fb182 ARG VELOCITY_COST_OVERRIDE ENV VELOCITY_COST_OVERRIDE=$VELOCITY_COST_OVERRIDE diff --git a/systems/drone/build_and_push.sh b/systems/drone/build_and_push.sh index 2464aff..5680a6a 100755 --- a/systems/drone/build_and_push.sh +++ b/systems/drone/build_and_push.sh @@ -60,7 +60,7 @@ echo "Building the metrics build image..." echo "Performing ECR Login..." -aws ecr --profile infrastructure get-login-password --region "${RESIM_SANDBOX_ECR_REGION}" \ +aws ecr get-login-password --region "${RESIM_SANDBOX_ECR_REGION}" \ | docker login --username AWS --password-stdin "${RESIM_SANDBOX_ECR}" echo "Pushing build image..." 
diff --git a/systems/drone/metrics/batch_metrics.py b/systems/drone/metrics/batch_metrics.py
index e8d6b0a..1c63434 100644
--- a/systems/drone/metrics/batch_metrics.py
+++ b/systems/drone/metrics/batch_metrics.py
@@ -60,7 +60,9 @@ def boundary(i: int):
         frac = i / bins
         return lower_bound * (1 - frac) + upper_bound * frac
 
-    return [HistogramBucket(lower=boundary(i), upper=boundary(i + 1)) for i in range(bins)]
+    return [
+        HistogramBucket(lower=boundary(i), upper=boundary(i + 1)) for i in range(bins)
+    ]
 
 
 async def compute_batch_metrics(
@@ -72,7 +74,9 @@ async def compute_batch_metrics(
     ################################################################################
 
     client = AuthenticatedClient(base_url=api_url, token=token)
-    batch = await get_batch.asyncio(project_id=project_id, batch_id=batch_id, client=client)
+    batch = await get_batch.asyncio(
+        project_id=project_id, batch_id=batch_id, client=client
+    )
 
     job_pages = await async_fetch_all_pages(
         list_jobs.asyncio,
@@ -153,14 +157,16 @@ async def compute_batch_metrics(
     job_status_counts_data = GroupedMetricsData(
         name="job_status_counts_data",
         category_to_series={
-            status: np.array([count]) for (status, count) in zip(barnames.series, count.series)
+            status: np.array([count])
+            for (status, count) in zip(barnames.series, count.series)
         },
     )
 
     job_status_counts_status = GroupedMetricsData(
         name="job_status_counts_status",
         category_to_series={
-            status: np.array([MetricStatus.PASSED_METRIC_STATUS]) for status in barnames.series
+            status: np.array([MetricStatus.PASSED_METRIC_STATUS])
+            for status in barnames.series
         },
     )
 
@@ -264,7 +270,9 @@ async def compute_batch_metrics(
     ################################################################################
     # SPEEDS HISTOGRAM
     #
-    allspeeds = SeriesMetricsData(name="all_speeds", series=np.array(speeds_df["speed (m/s)"]))
+    allspeeds = SeriesMetricsData(
+        name="all_speeds", series=np.array(speeds_df["speed (m/s)"])
+    )
     allspeeds_statuses = SeriesMetricsData(
         name="all_speeds_statuses",
         series=np.array([MetricStatus.PASSED_METRIC_STATUS] * len(allspeeds.series)),
@@ -301,59 +309,6 @@ async def compute_batch_metrics(
         .with_unit("m/s")
     )
 
-    ################################################################################
-    # DISTANCES HISTOGRAM
-    separations_df = pd.DataFrame(
-        [
-            [time, separation, test_names[job_id]]
-            for job_id in metrics_data
-            for (time, separation) in zip(
-                metrics_data_by_name[job_id]["separation_times"].series,
-                metrics_data_by_name[job_id]["separations"].series,
-            )
-        ],
-        columns=["time (s)", "separation (m)", "test"],
-    )
-    allseparations = SeriesMetricsData(
-        name="all_separations", series=np.array(separations_df["separation (m)"])
-    )
-    allseparations_statuses = SeriesMetricsData(
-        name="all_separations_statuses",
-        series=np.array([MetricStatus.PASSED_METRIC_STATUS] * len(allseparations.series)),
-    )
-
-    buckets = compute_buckets(allseparations)
-
-    (
-        metrics_writer.add_histogram_metric(name="Drone Separation Distribution")
-        .with_description("Drone separation distribution across the batch")
-        .with_status(MetricStatus.PASSED_METRIC_STATUS)
-        .with_importance(MetricImportance.ZERO_IMPORTANCE)
-        .with_should_display(True)
-        .with_blocking(False)
-        .with_values_data(allseparations)
-        .with_statuses_data(allseparations_statuses)
-        .with_buckets(buckets)
-        .with_lower_bound(buckets[0].lower)
-        .with_upper_bound(buckets[-1].upper)
-        .with_x_axis_name("separation (m)")
-    )
-
-    ################################################################################
-    # SEPARATIONS MEAN
-    #
-    (
-        metrics_writer.add_scalar_metric("Batch Mean Ego Separation")
-        .with_description("Mean separation over the batch for longitudinal reporting")
-        .with_status(MetricStatus.PASSED_METRIC_STATUS)
-        .with_importance(MetricImportance.ZERO_IMPORTANCE)
-        .with_should_display(False)  # Don't display. Only want for reports.
-        .with_blocking(False)
-        .with_value(np.mean(allseparations.series))
-        .with_unit("m")
-        .with_tag(key="RESIM_SUMMARY", value="1")
-    )
-
     write_proto(metrics_writer, "/tmp/resim/outputs/metrics.binproto")
diff --git a/systems/drone/metrics/metrics.py b/systems/drone/metrics/metrics.py
index 5186f44..b33e786 100644
--- a/systems/drone/metrics/metrics.py
+++ b/systems/drone/metrics/metrics.py
@@ -4,7 +4,6 @@
 # license that can be found in the LICENSE file or at
 # https://opensource.org/licenses/MIT.
 
-import argparse
 import asyncio
 import mcap.reader
 import uuid
@@ -54,9 +53,9 @@
 _TOPICS = ["/actor_states"]
 
 
-def load_experience(experience_path):
+def load_experience():
     """Load the experience proto from a file."""
-    with open(experience_path, "rb") as fp:
+    with open(EXPERIENCE_PATH, "rb") as fp:
         experience = text_format.Parse(fp.read(), Experience())
     return experience
 
@@ -76,17 +75,23 @@ def mapped_point(index):
     wireframe_collection.set(segments=segments)
 
 
-def load_log(log_path) -> list[dict[str, typing.Any]]:
+def load_log() -> list[dict[str, typing.Any]]:
     """Load the log from an mcap file."""
     messages = collections.defaultdict(list)
-    with open(log_path, "rb") as converted_log:
-        reader = mcap.reader.make_reader(converted_log, decoder_factories=[DecoderFactory()])
-        for _, channel, _, message_proto in reader.iter_decoded_messages(topics=_TOPICS):
+    with open(LOG_PATH, "rb") as converted_log:
+        reader = mcap.reader.make_reader(
+            converted_log, decoder_factories=[DecoderFactory()]
+        )
+        for _, channel, _, message_proto in reader.iter_decoded_messages(
+            topics=_TOPICS
+        ):
             messages[channel.topic].append(message_proto)
     return messages
 
 
-def make_gif_metric(writer, wireframe, poses: list[se3_python.SE3], times, goal, out_dir) -> None:
+def make_gif_metric(
+    writer, wireframe, poses: list[se3_python.SE3], times, goal
+) -> None:
     """Make a couple of gif metrics of the drone moving around."""
 
     trajectory = np.array([pose.translation() for pose in poses])
@@ -126,8 +131,10 @@ def animate(i: int):
         to_goal.set_data_3d(*([trajectory[i, j], goal[j]] for j in range(3)))
 
     ani = animation.FuncAnimation(fig, animate, num_steps)
-    pillow_writer = animation.PillowWriter(fps=10, metadata=dict(artist="Me"), bitrate=1800)
-    ani.save(out_dir / "pose.gif", writer=pillow_writer)
+    pillow_writer = animation.PillowWriter(
+        fps=10, metadata=dict(artist="Me"), bitrate=1800
+    )
+    ani.save("/tmp/resim/outputs/pose.gif", writer=pillow_writer)
 
     status = MetricStatus.PASSED_METRIC_STATUS
 
@@ -163,7 +170,9 @@ def animate_map(i: int):
         marker.set_data([trajectory[i, 0]], [trajectory[i, 1]])
 
     ani = animation.FuncAnimation(fig, animate_map, num_steps)
-    pillow_writer = animation.PillowWriter(fps=10, metadata=dict(artist="Me"), bitrate=1800)
+    pillow_writer = animation.PillowWriter(
+        fps=10, metadata=dict(artist="Me"), bitrate=1800
+    )
     ani.save("/tmp/resim/outputs/top_down.gif", writer=pillow_writer)
 
     data = ExternalFileMetricsData(name="top_down_gif_data", filename="top_down.gif")
@@ -179,51 +188,7 @@ def animate_map(i: int):
     )
 
 
-def average_distance_metric(writer, log):
-    states_over_time = log["/actor_states"]
-
-    pose_bundles = []
-    times = []
-
-    mean_distances = []
-
-    # TODO maybe produce 
a 2d plot - - def time_to_s(t): - return t.seconds + 1e-9 * t.nanos - - for bundle in log["/actor_states"]: - distances = [] - - poses = [se3_python.SE3.exp(s.state.ref_from_frame.algebra) for s in bundle.states] - for ii in range(len(poses)): - for jj in range(ii + 1, len(poses)): - distances.append(np.linalg.norm(poses[ii].translation() - poses[jj].translation())) - pose_bundles.append(poses) - - mean_distances.append(np.mean(distances)) - times.append(time_to_s(bundle.states[0].time_of_validity)) - - failure_def = DoubleFailureDefinition(fails_below=0.1, fails_above=None) - ( - writer.add_scalar_metric("Mean Distance Between Drones") - .with_failure_definition(failure_def) - .with_value(np.mean(mean_distances)) - .with_description("Mean distance between swarm members during sim") - .with_blocking(False) - .with_should_display(True) - .with_importance(MetricImportance.ZERO_IMPORTANCE) - .with_status(MetricStatus.PASSED_METRIC_STATUS) - .with_unit("m") - ) - separations = SeriesMetricsData("separations", series=np.array(mean_distances), unit="m") - times = SeriesMetricsData("separation_times", series=np.array(times), unit="s") - - writer.add_metrics_data(separations) - writer.add_metrics_data(times) - - -def ego_metrics(writer, experience, log, out_dir): +def ego_metrics(writer, log): """Compute the job metrics for the ego.""" ################################################################################ # EXTRACT USEFUL INFO FROM LOG + EXPERIENCE @@ -236,13 +201,18 @@ def ego_metrics(writer, experience, log, out_dir): if state.is_spawned: id_to_states[state.id.data].append(state) + experience = load_experience() ego_actors = [ - a for a in experience.dynamic_behavior.actors if a.actor_type == Actor.SYSTEM_UNDER_TEST + a + for a in experience.dynamic_behavior.actors + if a.actor_type == Actor.SYSTEM_UNDER_TEST ] ego_actor = ego_actors[0] ego_id = ego_actor.id.data ego_geometry = [ - g for g in experience.geometries if g.id.data == ego_actor.geometries[0].geometry_id.data + g + for g in experience.geometries + if g.id.data == ego_actor.geometries[0].geometry_id.data ][0] ego_states = id_to_states[ego_id] @@ -260,7 +230,7 @@ def time_to_s(t): ][0] ego_goal = np.array(ego_movement_model.ilqr_drone.goal_position) - # make_gif_metric(writer, ego_geometry, poses, times, ego_goal, out_dir) + make_gif_metric(writer, ego_geometry, poses, times, ego_goal) ego_states = ego_states[0::10] @@ -297,7 +267,8 @@ def time_to_s(t): # SPEED OVER TIME PLOT # velocities = [ - p.rotation() * np.array(s.state.d_ref_from_frame[3:]) for p, s in zip(poses, ego_states) + p.rotation() * np.array(s.state.d_ref_from_frame[3:]) + for p, s in zip(poses, ego_states) ] speeds = np.array([np.linalg.norm(v) for v in velocities]) @@ -364,7 +335,9 @@ def status_from_speed(speed): category_to_series={ "ego": np.array( [ - Timestamp(secs=s.time_of_validity.seconds, nanos=s.time_of_validity.nanos) + Timestamp( + secs=s.time_of_validity.seconds, nanos=s.time_of_validity.nanos + ) for s in ego_states ] ) @@ -372,12 +345,16 @@ def status_from_speed(speed): ) speed_states = GroupedMetricsData( "speed_states", - category_to_series={"ego": np.array([status_from_speed(s).name for s in speeds.series])}, + category_to_series={ + "ego": np.array([status_from_speed(s).name for s in speeds.series]) + }, index_data=timestamps, ) speed_states_status = GroupedMetricsData( "speed_states_status", - category_to_series={"ego": np.array([status_from_speed(s) for s in speeds.series])}, + category_to_series={ + "ego": np.array([status_from_speed(s) 
for s in speeds.series]) + }, index_data=timestamps, ) @@ -510,19 +487,21 @@ def pose_from_state(state): break -def write_proto(writer, metrics_path): +def write_proto(writer): """Write out the binproto for our metrics""" metrics_proto = writer.write() validate_job_metrics(metrics_proto.metrics_msg) # Known location where the runner looks for metrics - with open(metrics_path, "wb") as f: + with open(METRICS_PATH, "wb") as f: f.write(metrics_proto.metrics_msg.SerializeToString()) async def maybe_batch_metrics(): """Run batch metrics if the config is present.""" if BATCH_METRICS_CONFIG_PATH.is_file(): - with open(BATCH_METRICS_CONFIG_PATH, "r", encoding="utf-8") as metrics_config_file: + with open( + BATCH_METRICS_CONFIG_PATH, "r", encoding="utf-8" + ) as metrics_config_file: metrics_config = json.load(metrics_config_file) await compute_batch_metrics( token=metrics_config["authToken"], @@ -536,35 +515,13 @@ async def maybe_batch_metrics(): async def main(): - parser = argparse.ArgumentParser(description="Parse file paths with kebab-style flags.") - parser.add_argument( - "--log-path", - default="/tmp/resim/inputs/logs/resim_log.mcap", - help="Path to the log file (default: /tmp/resim/inputs/logs/resim_log.mcap)", - ) - parser.add_argument( - "--experience-path", - default="/tmp/resim/inputs/experience/experience.sim", - help="Path to the experience file (default: /tmp/resim/inputs/experience/experience.sim)", - ) - parser.add_argument( - "--metrics-path", - default="/tmp/resim/outputs/metrics.binproto", - help="Path to the metrics file (default: /tmp/resim/outputs/metrics.binproto)", - ) - args = parser.parse_args() - await maybe_batch_metrics() - log = load_log(args.log_path) - experience = load_experience(args.experience_path) + log = load_log() metrics_writer = ResimMetricsWriter(uuid.uuid4()) # Make metrics writer! 
- - out_dir = Path(args.metrics_path).parent - ego_metrics(metrics_writer, experience, log, out_dir) - average_distance_metric(metrics_writer, log) - write_proto(metrics_writer, args.metrics_path) + ego_metrics(metrics_writer, log) + write_proto(metrics_writer) if __name__ == "__main__": From 972d1e74afb009e8f6246b3b725baef1a8c9e2ef Mon Sep 17 00:00:00 2001 From: Michael Bauer Date: Fri, 31 Jan 2025 20:33:13 +0000 Subject: [PATCH 5/5] factor out helpers --- scripts/builds_config.py | 53 ++++++++ scripts/create_builds.py | 280 ++++++++------------------------------- scripts/utils.py | 157 ++++++++++++++++++++++ 3 files changed, 262 insertions(+), 228 deletions(-) create mode 100644 scripts/builds_config.py create mode 100644 scripts/utils.py diff --git a/scripts/builds_config.py b/scripts/builds_config.py new file mode 100644 index 0000000..bc57a92 --- /dev/null +++ b/scripts/builds_config.py @@ -0,0 +1,53 @@ +from pydantic import BaseModel + + +class ResimAppConfig(BaseModel): + client_id: str + auth_url: str + api_url: str + + +class RegistryAuth(BaseModel): + profile: str + + +class ImageRegistry(BaseModel): + account_id: str + region: str + auth: RegistryAuth + + +class ImageRepo(BaseModel): + name: str + registry: str + + +class BuildCommand(BaseModel): + path: str + + +class ExperienceBuild(BaseModel): + description: str + repo: ImageRepo + version_tag_prefix: str + system: str + branch: str + version: str + build_command: BuildCommand + + +class MetricsBuild(BaseModel): + name: str + repo: ImageRepo + version_tag_prefix: str + systems: list[str] + version: str + build_command: BuildCommand + + +class Builds(BaseModel): + project: str + registries: dict[str, ImageRegistry] + resim_app_config: ResimAppConfig + experience_build_configs: dict[str, ExperienceBuild] + metrics_build_configs: dict[str, MetricsBuild] diff --git a/scripts/create_builds.py b/scripts/create_builds.py index 4d1a2ec..b803eb3 100755 --- a/scripts/create_builds.py +++ b/scripts/create_builds.py @@ -1,137 +1,29 @@ #!/bin/python import argparse -import base64 import logging -from http import HTTPStatus -import boto3 import docker -import git import yaml from docker.client import DockerClient -from pydantic import BaseModel -from resim.auth.python.device_code_client import DeviceCodeClient -from resim.metrics.fetch_all_pages import fetch_all_pages -from resim_python_client.api.builds import create_build_for_branch -from resim_python_client.api.metrics_builds import create_metrics_build -from resim_python_client.api.projects import ( - create_branch_for_project, - list_branches_for_project, - list_projects, -) -from resim_python_client.api.systems import add_system_to_metrics_build, list_systems -from resim_python_client.client import AuthenticatedClient -from resim_python_client.models import ( - BranchType, - CreateBranchInput, - CreateBuildForBranchInput, - CreateMetricsBuildInput, + +from scripts.utils import ( + get_client, + get_project, + register_experience_build, + register_metrics_build, + get_systems, + get_branches, + docker_ecr_auth, + parse_version, ) +from scripts.builds_config import Builds, ImageRegistry, MetricsBuild, ExperienceBuild + logger = logging.getLogger("create_builds") logger.setLevel(logging.INFO) -class ResimAppConfig(BaseModel): - client_id: str - auth_url: str - api_url: str - - -class RegistryAuth(BaseModel): - profile: str - - -class ImageRegistry(BaseModel): - account_id: str - region: str - auth: RegistryAuth - - -class ImageRepo(BaseModel): - name: str - registry: str - - -class 
BuildCommand(BaseModel): - path: str - - -class ExperienceBuild(BaseModel): - description: str - repo: ImageRepo - version_tag_prefix: str - system: str - branch: str - version: str - build_command: BuildCommand - - -class MetricsBuild(BaseModel): - name: str - repo: ImageRepo - version_tag_prefix: str - systems: list[str] - version: str - build_command: BuildCommand - - -class Builds(BaseModel): - project: str - registries: dict[str, ImageRegistry] - resim_app_config: ResimAppConfig - experience_build_configs: dict[str, ExperienceBuild] - metrics_build_configs: dict[str, MetricsBuild] - - -def get_version(version: str) -> str: - if version == "auto": - repo = git.Repo(search_parent_directories=True) - return repo.head.object.hexsha - return version - - -def get_branch(branch: str) -> str: - if branch == "auto": - repo = git.Repo(search_parent_directories=True) - return repo.active_branch.name - return branch - - -def docker_ecr_auth(client: DockerClient, registry: ImageRegistry): - session = boto3.Session(profile_name=registry.auth.profile) - ecr_client = session.client("ecr", region_name=registry.region) - token = ecr_client.get_authorization_token() - password = ( - base64.b64decode(token["authorizationData"][0]["authorizationToken"]) - .decode() - .split(":")[1] - ) - registry_url = f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com" - client.login(username="AWS", password=password, registry=registry_url) - logger.info("Successfully authenticated to %s.", registry_url) - - -def get_client(config: ResimAppConfig) -> AuthenticatedClient: - auth_client = DeviceCodeClient(domain=config.auth_url, client_id=config.client_id) - token = auth_client.get_jwt()["access_token"] - client = AuthenticatedClient(base_url=config.api_url, token=token) - - return client - - -def get_project(project: str, client: AuthenticatedClient) -> str: - project_pages = fetch_all_pages(list_projects.sync, client=client) - projects = {p.name: p for page in project_pages for p in page.projects} - return projects[project] - - -def open_config() -> dict: - with open("builds.yaml", "r", encoding="utf-8") as f: - return yaml.load(f, Loader=yaml.SafeLoader) - - def list_command(builds, args): logger.info("Experience Builds:") for build in builds.experience_build_configs: @@ -142,92 +34,32 @@ def list_command(builds, args): logger.info(" %s", build) -def get_systems(client: AuthenticatedClient, project_id: str) -> dict[str, str]: - system_pages = fetch_all_pages( - list_systems.sync, client=client, project_id=project_id +def build_image( + build: ExperienceBuild | MetricsBuild, + registries: dict[str, ImageRegistry], + docker_client: DockerClient, +) -> str: + repo = build.repo + registry = registries[repo.registry] + docker_ecr_auth(docker_client, registry) + command_path = build.build_command.path + full_repo_name = ( + f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com/{repo.name}" ) - return {p.name: p.system_id for page in system_pages for p in page.systems} + version = parse_version(build.version) + tag = f"{build.version_tag_prefix}{version}" + uri = f"{full_repo_name}:{tag}" + response = docker_client.api.build(path=command_path, tag=uri, decode=True) + for line in response: + logger.info(" ".join((str(v) for v in line.values()))) - -def get_branches(client: AuthenticatedClient, project_id: str) -> dict[str, str]: - branch_pages = fetch_all_pages( - list_branches_for_project.sync, client=client, project_id=project_id - ) - return {p.name: p.branch_id for page in branch_pages for p in 
page.branches} + return uri -def make_branch(client: AuthenticatedClient, project_id: str, branch: str): - response = create_branch_for_project.sync( - project_id=project_id, - client=client, - body=CreateBranchInput(branch_type=BranchType.CHANGE_REQUEST, name=branch), - ) - assert response is not None, "Failed to make branch" - logger.info("Registered branch with id %s", response.branch_id) - - return response.branch_id - - -def register_experience_build( - client: AuthenticatedClient, - project_id: str, - build: ExperienceBuild, - uri: str, - systems: dict[str, str], - branches: dict[str, str], -): - branch = get_branch(build.branch) - version = get_version(build.version) - if branch not in branches: - branches[branch] = make_branch(client, project_id, branch) - - response = create_build_for_branch.sync( - project_id=project_id, - branch_id=branches[branch], - client=client, - body=CreateBuildForBranchInput( - image_uri=uri, - system_id=systems[build.system], - version=version, - description=build.description, - ), - ) - assert response is not None - logger.info("Registered experience build with id %s", response.build_id) - - -def register_metrics_build( - client: AuthenticatedClient, - project_id: str, - build: MetricsBuild, - uri: str, - systems: dict[str, str], -): - version = get_version(build.version) - response = create_metrics_build.sync( - project_id=project_id, - client=client, - body=CreateMetricsBuildInput( - image_uri=uri, - name=build.name, - version=version, - ), - ) - assert response is not None - logger.info("Registered metrics build with id %s", response.metrics_build_id) - metrics_build_id = response.metrics_build_id - - for system in build.systems: - response = add_system_to_metrics_build.sync_detailed( - project_id=project_id, - system_id=systems[system], - metrics_build_id=metrics_build_id, - client=client, - ) - assert ( - response.status_code == HTTPStatus.CREATED - ), "Failed to add metrics build to system" - logger.info("Added metrics build %s to %s system", metrics_build_id, system) +def push_image(uri: str, docker_client: DockerClient): + response = docker_client.api.push(uri, stream=True, decode=True) + for line in response: + logger.info(" ".join((str(v) for v in line.values()))) def build_push(builds, args, *, push: bool): @@ -240,39 +72,29 @@ def build_push(builds, args, *, push: bool): combined_map = builds.experience_build_configs | builds.metrics_build_configs for target in args.target_builds: + if target not in builds.experience_build_configs: + continue build = combined_map[target] - repo = build.repo - registry = builds.registries[repo.registry] + uri = build_image(build, builds.registries, docker_client) + + if not push: + continue - docker_ecr_auth(docker_client, registry) - command_path = build.build_command.path + push_image(uri, docker_client) + register_experience_build(client, project_id, build, uri, systems, branches) - full_repo_name = ( - f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com/{repo.name}" - ) - version = get_version(build.version) - tag = f"{build.version_tag_prefix}{version}" - uri = f"{full_repo_name}:{tag}" + for target in args.target_builds: + if target not in builds.metrics_build_configs: + continue - response = docker_client.api.build(path=command_path, tag=uri, decode=True) - for line in response: - logger.info(" ".join((str(v) for v in line.values()))) + build = combined_map[target] + uri = build_image(build, builds.registries, docker_client) if not push: continue - response = docker_client.api.push( - 
repository=full_repo_name, tag=tag, stream=True, decode=True
-        )
-        for line in response:
-            logger.info(" ".join((str(v) for v in line.values())))
-
-        if isinstance(build, ExperienceBuild):
-            register_experience_build(client, project_id, build, uri, systems, branches)
-        elif isinstance(build, MetricsBuild):
-            register_metrics_build(client, project_id, build, uri, systems)
-        else:
-            raise RuntimeError("Bad build type!")
+        push_image(uri, docker_client)
+        register_metrics_build(client, project_id, build, uri, systems)
 
 
 def push_command(builds, args):
@@ -305,7 +127,9 @@ def main():
     push_parser.set_defaults(func=push_command)
 
     args = parser.parse_args()
-    builds = Builds(**open_config())
+
+    with open("builds.yaml", "r", encoding="utf-8") as f:
+        builds = Builds(**yaml.load(f, Loader=yaml.SafeLoader))
     args.func(builds, args)  # Call the appropriate function
 
 
diff --git a/scripts/utils.py b/scripts/utils.py
new file mode 100644
index 0000000..2fdb968
--- /dev/null
+++ b/scripts/utils.py
@@ -0,0 +1,157 @@
+import base64
+import logging
+from http import HTTPStatus
+
+import boto3
+import git
+from docker.client import DockerClient
+from resim.auth.python.device_code_client import DeviceCodeClient
+from resim.metrics.fetch_all_pages import fetch_all_pages
+from resim_python_client.api.builds import create_build_for_branch
+from resim_python_client.api.metrics_builds import create_metrics_build
+from resim_python_client.api.projects import (
+    create_branch_for_project,
+    list_branches_for_project,
+    list_projects,
+)
+from resim_python_client.api.systems import add_system_to_metrics_build, list_systems
+from resim_python_client.client import AuthenticatedClient
+from resim_python_client.models import (
+    BranchType,
+    CreateBranchInput,
+    CreateBuildForBranchInput,
+    CreateMetricsBuildInput,
+)
+from scripts.builds_config import (
+    ExperienceBuild,
+    ImageRegistry,
+    MetricsBuild,
+    ResimAppConfig,
+)
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+
+def parse_version(version: str) -> str:
+    if version == "auto":
+        repo = git.Repo(search_parent_directories=True)
+        return repo.head.object.hexsha
+    return version
+
+
+def parse_branch(branch: str) -> str:
+    if branch == "auto":
+        repo = git.Repo(search_parent_directories=True)
+        return repo.active_branch.name
+    return branch
+
+
+def docker_ecr_auth(client: DockerClient, registry: ImageRegistry):
+    session = boto3.Session(profile_name=registry.auth.profile)
+    ecr_client = session.client("ecr", region_name=registry.region)
+    token = ecr_client.get_authorization_token()
+    password = (
+        base64.b64decode(token["authorizationData"][0]["authorizationToken"]).decode().split(":")[1]
+    )
+    registry_url = f"{registry.account_id}.dkr.ecr.{registry.region}.amazonaws.com"
+    client.login(username="AWS", password=password, registry=registry_url)
+    logger.info("Successfully authenticated to %s.", registry_url)
+
+
+def get_client(config: ResimAppConfig) -> AuthenticatedClient:
+    auth_client = DeviceCodeClient(domain=config.auth_url, client_id=config.client_id)
+    token = auth_client.get_jwt()["access_token"]
+    client = AuthenticatedClient(base_url=config.api_url, token=token)
+
+    return client
+
+
+def get_project(project: str, client: AuthenticatedClient):
+    project_pages = fetch_all_pages(list_projects.sync, client=client)
+    projects = {p.name: p for page in project_pages for p in page.projects}
+    return projects[project]
+
+ +def get_systems(client: AuthenticatedClient, project_id: str) -> dict[str, str]: + system_pages = fetch_all_pages(list_systems.sync, client=client, project_id=project_id) + return {p.name: p.system_id for page in system_pages for p in page.systems} + + +def get_branches(client: AuthenticatedClient, project_id: str) -> dict[str, str]: + branch_pages = fetch_all_pages( + list_branches_for_project.sync, client=client, project_id=project_id + ) + return {p.name: p.branch_id for page in branch_pages for p in page.branches} + + +def make_branch(client: AuthenticatedClient, project_id: str, branch: str): + response = create_branch_for_project.sync( + project_id=project_id, + client=client, + body=CreateBranchInput(branch_type=BranchType.CHANGE_REQUEST, name=branch), + ) + assert response is not None, "Failed to make branch" + logger.info("Registered branch with id %s", response.branch_id) + + return response.branch_id + + +def register_experience_build( + client: AuthenticatedClient, + project_id: str, + build: ExperienceBuild, + uri: str, + systems: dict[str, str], + branches: dict[str, str], +): + branch = parse_branch(build.branch) + version = parse_version(build.version) + if branch not in branches: + branches[branch] = make_branch(client, project_id, branch) + + response = create_build_for_branch.sync( + project_id=project_id, + branch_id=branches[branch], + client=client, + body=CreateBuildForBranchInput( + image_uri=uri, + system_id=systems[build.system], + version=version, + description=build.description, + ), + ) + assert response is not None + logger.info("Registered experience build with id %s", response.build_id) + + +def register_metrics_build( + client: AuthenticatedClient, + project_id: str, + build: MetricsBuild, + uri: str, + systems: dict[str, str], +): + version = parse_version(build.version) + response = create_metrics_build.sync( + project_id=project_id, + client=client, + body=CreateMetricsBuildInput( + image_uri=uri, + name=build.name, + version=version, + ), + ) + assert response is not None + logger.info("Registered metrics build with id %s", response.metrics_build_id) + metrics_build_id = response.metrics_build_id + + for system in build.systems: + response = add_system_to_metrics_build.sync_detailed( + project_id=project_id, + system_id=systems[system], + metrics_build_id=metrics_build_id, + client=client, + ) + assert response.status_code == HTTPStatus.CREATED, "Failed to add metrics build to system" + logger.info("Added metrics build %s to %s system", metrics_build_id, system)
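
Usage note for the tooling above: scripts/create_builds.py validates builds.yaml
against the pydantic models that PATCH 5 factors into scripts/builds_config.py.
The sketch below is not part of the patches; it assumes the requirements.txt
dependencies are installed and that it runs from the repository root, where the
script expects builds.yaml to live:

    import yaml

    from scripts.builds_config import Builds

    # Validate builds.yaml against the schema; pydantic raises on missing or
    # mistyped fields, so a bad config fails before any Docker or API calls.
    with open("builds.yaml", "r", encoding="utf-8") as f:
        builds = Builds(**yaml.load(f, Loader=yaml.SafeLoader))

    # The same names the `list` subcommand logs:
    print("experience:", *builds.experience_build_configs)  # drone_experience
    print("metrics:", *builds.metrics_build_configs)        # drone_metrics

    # "auto" placeholders resolve to git state at build time via
    # parse_version()/parse_branch() in scripts/utils.py.
    drone = builds.experience_build_configs["drone_experience"]
    assert drone.version == "auto" and drone.branch == "auto"

Because validation happens up front, a malformed builds.yaml fails fast with a
pydantic error naming the offending field rather than partway through a push.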
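
The intended CLI flow is ./scripts/create_builds.py list to see the configured
targets, build <target> to build images locally, and push <target...> to build,
push, and register them with the ReSim API. Under the hood, push composes the
scripts/utils.py helpers roughly as in this sketch (assumptions: valid AWS and
ReSim credentials are available, and the image URI shown is a placeholder for
the tag that build_image() derives from the registry config):

    import yaml

    from scripts.builds_config import Builds
    from scripts.utils import (
        get_branches,
        get_client,
        get_project,
        get_systems,
        register_experience_build,
    )

    with open("builds.yaml", "r", encoding="utf-8") as f:
        builds = Builds(**yaml.load(f, Loader=yaml.SafeLoader))

    # Authenticate via the Auth0 device-code flow and resolve the project.
    client = get_client(builds.resim_app_config)
    project_id = get_project(builds.project, client).project_id
    systems = get_systems(client, project_id)    # system name -> system_id
    branches = get_branches(client, project_id)  # branch name -> branch_id

    build = builds.experience_build_configs["drone_experience"]
    # Placeholder URI; build_push() builds and pushes the real image first.
    uri = "909785973729.dkr.ecr.us-east-1.amazonaws.com/drone-simulator:drone_sim_deadbeef"
    register_experience_build(client, project_id, build, uri, systems, branches)

register_experience_build() creates the branch on the fly when it is not
already registered, which is what lets the "branch: auto" config track feature
branches without manual setup.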
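
For reference on PATCH 1's average_distance_metric (reverted again in PATCH 4):
for each actor-state bundle it computes all n*(n-1)/2 pairwise distances
between drone translations, takes their mean, and the scalar metric is the
time-average of those means, guarded by a fails_below=0.1 m failure
definition. A standalone sketch of the per-bundle reduction, using made-up
positions rather than log data:

    import numpy as np

    # One "bundle" = every drone's position at a single timestamp.
    bundle = np.array([[0.0, 0.0, 0.0], [3.0, 4.0, 0.0], [0.0, 0.0, 10.0]])

    # All 3 pairwise separations: ||p_i - p_j|| for i < j.
    dists = [
        np.linalg.norm(bundle[i] - bundle[j])
        for i in range(len(bundle))
        for j in range(i + 1, len(bundle))
    ]
    print(np.mean(dists))  # (5.0 + 10.0 + 11.180...) / 3 ~= 8.73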