Skip to content

Commit

Permalink
feat: Add in new support bundle work items (#490)
Browse files Browse the repository at this point in the history
  • Loading branch information
vilit1 authored Feb 6, 2025
1 parent a4e8166 commit e77ea2d
Show file tree
Hide file tree
Showing 11 changed files with 97 additions and 11 deletions.
24 changes: 19 additions & 5 deletions .github/workflows/int_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -229,11 +229,15 @@ jobs:
run: |
python -m pip install tox
tox r -vv -e python-init-int --notest
- name: "Tox test environment setup for integration tests"
- name: "Tox test environment setup for edge integration tests"
if: ${{ matrix.feature == 'default' && !inputs.use-container }}
run: |
tox r -vv -e "python-all-int" --notest
- name: "Tox test environment setup for integration tests"
tox r -vv -e "python-edge-int" --notest
      - name: "Tox test environment setup for cloud integration tests"
if: ${{ matrix.feature == 'default' && !inputs.use-container }}
run: |
tox r -vv -e "python-rpsaas-int" --notest
- name: "Tox test environment setup for Workload Identity integration tests"
if: ${{ matrix.feature == 'secretsync' && !inputs.use-container }}
run: |
tox r -vv -e "python-secretsync-int" --notest
Expand Down Expand Up @@ -269,14 +273,24 @@ jobs:
azext_edge_init_redeployment: false
run: |
tox r -e python-init-int --skip-pkg-install -- --durations=0
- name: "Tox Integration Tests"
- name: "Tox Edge Integration Tests"
if: ${{ matrix.feature == 'default' && !inputs.use-container }}
env:
azext_edge_rg: ${{ steps.env_out.outputs.RESOURCE_GROUP }}
azext_edge_cluster: ${{ steps.env_out.outputs.CLUSTER_NAME }}
azext_edge_instance: ${{ steps.env_out.outputs.INSTANCE_NAME }}
run: |
tox r -e "python-all-int" --skip-pkg-install -- --durations=0 --dist=loadfile -n auto
tox r -e "python-edge-int" --skip-pkg-install -- --durations=0 --dist=loadfile -n auto
coverage=$(jq .totals.percent_covered coverage.json | cut -c1-4)
echo "Code coverage: $coverage%" >> $GITHUB_STEP_SUMMARY
- name: "Tox Cloud Integration Tests"
if: ${{ matrix.feature == 'syncrules' && !inputs.use-container }}
env:
azext_edge_rg: ${{ steps.env_out.outputs.RESOURCE_GROUP }}
azext_edge_cluster: ${{ steps.env_out.outputs.CLUSTER_NAME }}
azext_edge_instance: ${{ steps.env_out.outputs.INSTANCE_NAME }}
run: |
tox r -e "python-rpsaas-int" --skip-pkg-install -- --durations=0 --dist=loadfile -n auto
coverage=$(jq .totals.percent_covered coverage.json | cut -c1-4)
echo "Code coverage: $coverage%" >> $GITHUB_STEP_SUMMARY
- name: "Tox Workload Identity Integration Tests"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def _assert_above_min(param: str, value: int, minimum: int = 0) -> str:
def _raise_if_connector_error(connector_type: str, error_msg: str):
if error_msg:
raise InvalidArgumentValueError(
f"The following {connector_type} connector arguments are invalid:\n {error_msg}"
f"The following {connector_type} connector arguments are invalid:\n{error_msg}"
)


Expand Down
9 changes: 9 additions & 0 deletions azext_edge/edge/providers/support/dataflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
assemble_crd_work,
process_deployments,
process_replicasets,
process_services,
process_v1_pods,
)
from .common import NAME_LABEL_FORMAT
Expand Down Expand Up @@ -52,6 +53,13 @@ def fetch_replicasets():
)


def fetch_services():
    """Collect dataflow service resources for the support bundle."""
    selector = DATAFLOW_NAME_LABEL
    return process_services(
        label_selector=selector,
        directory_path=DATAFLOW_DIRECTORY_PATH,
    )


def fetch_pods(since_seconds: int = DAY_IN_SECONDS):
return process_v1_pods(
directory_path=DATAFLOW_DIRECTORY_PATH,
Expand All @@ -62,6 +70,7 @@ def fetch_pods(since_seconds: int = DAY_IN_SECONDS):

support_runtime_elements = {
"deployments": fetch_deployments,
"services": fetch_services,
"replicasets": fetch_replicasets,
}

Expand Down
18 changes: 18 additions & 0 deletions azext_edge/edge/providers/support/mq.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
DAY_IN_SECONDS,
assemble_crd_work,
get_mq_namespaces,
process_config_maps,
process_daemonsets,
process_jobs,
process_replicasets,
process_services,
process_statefulset,
Expand Down Expand Up @@ -86,6 +88,20 @@ def fetch_replicasets():
)


def fetch_jobs():
    """Collect MQ job resources for the support bundle."""
    selector = MQ_NAME_LABEL
    return process_jobs(
        label_selector=selector,
        directory_path=MQ_DIRECTORY_PATH,
    )


def fetch_configmaps():
    """Collect MQ config map resources for the support bundle."""
    selector = MQ_NAME_LABEL
    return process_config_maps(
        label_selector=selector,
        directory_path=MQ_DIRECTORY_PATH,
    )


def fetch_pods(since_seconds: int = DAY_IN_SECONDS):
return process_v1_pods(
directory_path=MQ_DIRECTORY_PATH,
Expand All @@ -96,6 +112,8 @@ def fetch_pods(since_seconds: int = DAY_IN_SECONDS):

support_runtime_elements = {
"statefulsets": fetch_statefulsets,
"configmaps": fetch_configmaps,
"jobs": fetch_jobs,
"replicasets": fetch_replicasets,
"services": fetch_services,
"daemonsets": fetch_daemonsets,
Expand Down
9 changes: 9 additions & 0 deletions azext_edge/edge/providers/support/opcua.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from .base import (
DAY_IN_SECONDS,
assemble_crd_work,
process_config_maps,
process_deployments,
process_services,
process_v1_pods,
Expand Down Expand Up @@ -84,6 +85,13 @@ def fetch_services():
return processed


def fetch_configmaps():
    """Collect OPC UA config map resources for the support bundle."""
    selector = OPCUA_NAME_LABEL
    return process_config_maps(
        label_selector=selector,
        directory_path=OPC_DIRECTORY_PATH,
    )


def fetch_daemonsets():
processed = process_daemonsets(
directory_path=OPC_DIRECTORY_PATH,
Expand All @@ -100,6 +108,7 @@ def fetch_daemonsets():

support_runtime_elements = {
"daemonsets": fetch_daemonsets,
"configmaps": fetch_configmaps,
"deployments": fetch_deployments,
"replicasets": fetch_replicasets,
"services": fetch_services,
Expand Down
12 changes: 11 additions & 1 deletion azext_edge/tests/edge/support/create_bundle_int/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ def get_file_map(
c_namespace = namespaces.get("usage_system")
certmanager_namespace = namespaces.get("certmanager")
osm_namespace = namespaces.get("osm")
ops_path = None

if aio_namespace:
walk_result.pop(path.join(BASE_ZIP_PATH, aio_namespace))
Expand Down Expand Up @@ -502,12 +503,21 @@ def run_bundle_command(


def split_name(name: str) -> List[str]:
"""
Splits a name by the .'s.
If a number is present (ex: versioning like 1.0.0-preview), do not split that portion.
Make sure the extension is split out (last . for the extension).
"""
first_pass = name.split(".")
second_pass = []
for i in range(len(first_pass)):
# we should not need to worry about trying to access too early
# since the first part should be the workload type (ex: pod)
if first_pass[i].isnumeric() or first_pass[i - 1].isnumeric():
if all([
i != (len(first_pass) - 1),
first_pass[i].isnumeric() or first_pass[i - 1].isnumeric()
]):
second_pass[-1] = f"{second_pass[-1]}.{first_pass[i]}"
else:
second_pass.append(first_pass[i])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def test_create_bundle_dataflow(init_setup, tracked_files):
resource_api=DATAFLOW_API_V1
)

expected_workload_types = ["deployment", "pod", "replicaset"]
expected_workload_types = ["deployment", "pod", "replicaset", "service"]
expected_types = set(expected_workload_types).union(DATAFLOW_API_V1.kinds)
assert set(file_map.keys()).issubset(expected_types)
check_workload_resource_files(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@
"pod.aio-job-0.1.0-preview-0.42.0-instance.aio-upgrade-status-job.log",
["pod", "aio-job-0.1.0-preview-0.42.0-instance", "aio-upgrade-status-job", "log"]
],
[
"pod.aio-job-0.1.0-preview-0.42.0.log",
["pod", "aio-job-0.1.0-preview-0.42.0", "log"]
]
])
def test_split_name(input: str, expected: List[str]):
result = split_name(input)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,9 @@ def test_create_bundle_mq(init_setup, tracked_files, mq_traces):

check_custom_resource_files(file_objs=file_map, resource_api=MQ_ACTIVE_API)

expected_workload_types = ["pod", "daemonset", "replicaset", "service", "statefulset"]
expected_workload_types = [
"pod", "daemonset", "replicaset", "service", "statefulset", "job", "configmap"
]
expected_types = set(expected_workload_types).union(MQ_ACTIVE_API.kinds)
assert set(file_map.keys()).issubset(expected_types)

Expand Down Expand Up @@ -78,6 +80,7 @@ def test_create_bundle_mq(init_setup, tracked_files, mq_traces):
check_workload_resource_files(
file_objs=file_map,
expected_workload_types=expected_workload_types,
prefixes=["aio-broker", "aio-dmqtt"],
prefixes=["aio-broker", "aio-dmqtt", "otel-collector-service"],
bundle_path=bundle_path,
expected_label=("app.kubernetes.io/name", "microsoft-iotoperations-mqttbroker")
)
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def test_create_bundle_opcua(init_setup, tracked_files):
resource_api=OPCUA_API_V1
)

expected_workload_types = ["daemonset", "deployment", "pod", "replicaset", "service"]
expected_workload_types = ["daemonset", "deployment", "pod", "replicaset", "service", "configmap"]
optional_workload_types = ["podmetric"]
expected_types = set(expected_workload_types + optional_workload_types).union(OPCUA_API_V1.kinds)
assert set(file_map.keys()).issubset(expected_types)
Expand Down
19 changes: 19 additions & 0 deletions azext_edge/tests/edge/support/test_support_unit.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ def test_create_bundle(
mocked_os_makedirs,
mocked_zipfile,
mocked_get_custom_objects,
mocked_list_config_maps,
mocked_list_cron_jobs,
mocked_list_jobs,
mocked_list_deployments,
Expand Down Expand Up @@ -203,6 +204,12 @@ def test_create_bundle(
label_selector=MQ_NAME_LABEL,
directory_path=MQ_DIRECTORY_PATH,
)
assert_list_config_maps(
mocked_client,
mocked_zipfile,
label_selector=MQ_NAME_LABEL,
directory_path=MQ_DIRECTORY_PATH,
)

if api in [OPCUA_API_V1]:
# Assert runtime resources
Expand Down Expand Up @@ -284,6 +291,12 @@ def test_create_bundle(
label_selector=OPCUA_NAME_LABEL,
directory_path=OPC_DIRECTORY_PATH,
)
assert_list_config_maps(
mocked_client,
mocked_zipfile,
label_selector=OPCUA_NAME_LABEL,
directory_path=OPC_DIRECTORY_PATH,
)
assert_list_services(
mocked_client,
mocked_zipfile,
Expand Down Expand Up @@ -315,6 +328,12 @@ def test_create_bundle(
)

if api in [DATAFLOW_API_V1]:
assert_list_services(
mocked_client,
mocked_zipfile,
label_selector=DATAFLOW_API_V1.label,
directory_path=DATAFLOW_API_V1.moniker,
)
assert_list_deployments(
mocked_client,
mocked_zipfile,
Expand Down

0 comments on commit e77ea2d

Please sign in to comment.