[Monitor] Apply black formatting #38129

Merged 2 commits on Oct 30, 2024
Changes from all commits
@@ -81,8 +81,7 @@ def _upload(  # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...

     @overload
     def _upload(  # pylint: disable=inconsistent-return-statements
@@ -94,8 +93,7 @@ def _upload(  # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...

     @distributed_trace
     def _upload(  # pylint: disable=inconsistent-return-statements
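
The hunks above are pure formatting: recent black releases (the 24.x stable style) collapse an ellipsis-only overload body onto the signature line. A minimal sketch reproducing the change with black's Python API (the 120-character line length is an assumption; the repo pins its own black configuration):

# Minimal sketch: reproduce the ellipsis collapse that black applies above.
# Assumptions: black >= 24.1 (stable "dummy implementations" style) and
# line_length=120; the azure-sdk-for-python repo pins its own config.
import black

src = """\
@overload
def _upload(
    *,
    content_type: str = "application/json",
    **kwargs,
) -> None:
    ...
"""

formatted = black.format_str(src, mode=black.Mode(line_length=120))
print(formatted)  # the body moves up: ") -> None: ..."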
@@ -46,8 +46,7 @@ async def _upload(  # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...

     @overload
     async def _upload(  # pylint: disable=inconsistent-return-statements
@@ -59,8 +58,7 @@ async def _upload(  # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...

     @distributed_trace_async
     async def _upload(  # pylint: disable=inconsistent-return-statements
@@ -59,9 +59,10 @@ async def on_error_pass(_) -> None:
 # Sample callback that raises the error if it corresponds to a specific HTTP error code.
 # This aborts the rest of the upload.
 async def on_error_abort(error: LogsUploadError) -> None:
-    if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403):
-        print("Aborting upload...")
-        raise error.error
+    if isinstance(error.error, HttpResponseError):
+        if cast(HttpResponseError, error.error).status_code in (400, 401, 403):
+            print("Aborting upload...")
+            raise error.error

 client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
 async with client:
@@ -45,7 +45,8 @@
     {"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer2", "AdditionalContext": "context"},
 ]

-failed_logs: List[MutableMapping[str, str]] = []
+failed_logs: List[MutableMapping[str, str]] = []
+

 # Sample callback that stores the logs that failed to upload.
 def on_error_save(error: LogsUploadError) -> None:
@@ -61,9 +62,10 @@ def on_error_pass(_) -> None:
 # Sample callback that raises the error if it corresponds to a specific HTTP error code.
 # This aborts the rest of the upload.
 def on_error_abort(error: LogsUploadError) -> None:
-    if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403):
-        print("Aborting upload...")
-        raise error.error
+    if isinstance(error.error, HttpResponseError):
+        if cast(HttpResponseError, error.error).status_code in (400, 401, 403):
+            print("Aborting upload...")
+            raise error.error


 client.upload(rule_id=rule_id, stream_name=os.environ["LOGS_DCR_STREAM_NAME"], logs=body, on_error=on_error_save)
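
Because on_error_abort re-raises the underlying HttpResponseError, a caller can stop on fatal codes by catching it around upload. A hypothetical sketch, reusing client, rule_id, and body from the surrounding sample:

# Hypothetical sketch: handling the error re-raised by on_error_abort.
# Assumes `client`, `rule_id`, and `body` from the surrounding sample.
try:
    client.upload(
        rule_id=rule_id,
        stream_name=os.environ["LOGS_DCR_STREAM_NAME"],
        logs=body,
        on_error=on_error_abort,
    )
except HttpResponseError as response_error:
    # A 400/401/403 response aborts the remaining chunked uploads.
    print(f"Upload aborted with status {response_error.status_code}")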
86 changes: 44 additions & 42 deletions sdk/monitor/azure-monitor-ingestion/setup.py
@@ -1,10 +1,10 @@
 #!/usr/bin/env python

-#-------------------------------------------------------------------------
+# -------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------

 import re
 import os.path
@@ -16,76 +16,78 @@
 PACKAGE_PPRINT_NAME = "Azure Monitor Ingestion"

 # a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace('-', '/')
+package_folder_path = PACKAGE_NAME.replace("-", "/")
 # a-b-c => a.b.c
-namespace_name = PACKAGE_NAME.replace('-', '.')
+namespace_name = PACKAGE_NAME.replace("-", ".")

 # azure v0.x is not compatible with this package
 # azure v0.x used to have a __version__ attribute (newer versions don't)
 try:
     import azure

     try:
         ver = azure.__version__
         raise Exception(
-            'This package is incompatible with azure=={}. '.format(ver) +
-            'Uninstall it with "pip uninstall azure".'
+            "This package is incompatible with azure=={}. ".format(ver) + 'Uninstall it with "pip uninstall azure".'
         )
     except AttributeError:
         pass
 except ImportError:
     pass

 # Version extraction inspired from 'requests'
-with open(os.path.join(package_folder_path, 'version.py')
-          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
-          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
-    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
-                        fd.read(), re.MULTILINE).group(1)
+with open(
+    (
+        os.path.join(package_folder_path, "version.py")
+        if os.path.exists(os.path.join(package_folder_path, "version.py"))
+        else os.path.join(package_folder_path, "_version.py")
+    ),
+    "r",
+) as fd:
+    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)

 if not version:
-    raise RuntimeError('Cannot find version information')
+    raise RuntimeError("Cannot find version information")

-with open('README.md', encoding='utf-8') as f:
+with open("README.md", encoding="utf-8") as f:
     readme = f.read()
-with open('CHANGELOG.md', encoding='utf-8') as f:
+with open("CHANGELOG.md", encoding="utf-8") as f:
     changelog = f.read()

 setup(
     name=PACKAGE_NAME,
     version=version,
-    description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
-    long_description=readme + '\n\n' + changelog,
-    long_description_content_type='text/markdown',
-    license='MIT License',
-    author='Microsoft Corporation',
-    author_email='[email protected]',
-    url='https://github.com/Azure/azure-sdk-for-python',
+    description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
+    long_description=readme + "\n\n" + changelog,
+    long_description_content_type="text/markdown",
+    license="MIT License",
+    author="Microsoft Corporation",
+    author_email="[email protected]",
+    url="https://github.com/Azure/azure-sdk-for-python",
     keywords="azure, azure sdk",
     classifiers=[
         "Development Status :: 5 - Production/Stable",
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 3 :: Only',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-        'Programming Language :: Python :: 3.12',
-        'License :: OSI Approved :: MIT License',
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "License :: OSI Approved :: MIT License",
     ],
     python_requires=">=3.8",
     zip_safe=False,
-    packages=find_packages(exclude=[
-        'tests',
-        'samples',
-        # Exclude packages that will be covered by PEP420 or nspkg
-        'azure',
-        'azure.monitor',
-    ]),
+    packages=find_packages(
+        exclude=[
+            "tests",
+            "samples",
+            # Exclude packages that will be covered by PEP420 or nspkg
+            "azure",
+            "azure.monitor",
+        ]
+    ),
     include_package_data=True,
-    install_requires=[
-        'azure-core>=1.28.0',
-        'isodate>=0.6.0',
-        "typing-extensions>=4.0.1"
-    ]
+    install_requires=["azure-core>=1.28.0", "isodate>=0.6.0", "typing-extensions>=4.0.1"],
 )
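
The version-extraction regex above is easy to sanity-check in isolation; a quick sketch against hypothetical file contents:

# Quick sketch of setup.py's VERSION regex, run on sample file contents.
import re

sample = 'VERSION = "1.0.4"\n'  # hypothetical _version.py contents
match = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', sample, re.MULTILINE)
assert match is not None
print(match.group(1))  # -> 1.0.4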
@@ -12,7 +12,7 @@
 AUDIENCE_MAP = {
     "AzureCloud": "https://monitor.azure.com",
     "AzureChinaCloud": "https://monitor.azure.cn",
-    "AzureUSGovernment": "https://monitor.azure.us"
+    "AzureUSGovernment": "https://monitor.azure.us",
 }

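
These audiences typically feed the client's token scope. A sketch under the assumption, common across Azure SDK clients, that the scope is the audience plus "/.default":

# Sketch: deriving a token scope from AUDIENCE_MAP. The "<audience>/.default"
# convention is an assumption based on the usual Azure SDK pattern.
AUDIENCE_MAP = {
    "AzureCloud": "https://monitor.azure.com",
    "AzureChinaCloud": "https://monitor.azure.cn",
    "AzureUSGovernment": "https://monitor.azure.us",
}

audience = AUDIENCE_MAP["AzureUSGovernment"]
credential_scopes = [f"{audience}/.default"]  # https://monitor.azure.us/.default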
16 changes: 6 additions & 10 deletions sdk/monitor/azure-monitor-ingestion/tests/conftest.py
@@ -32,7 +32,7 @@
     add_general_regex_sanitizer,
     add_header_regex_sanitizer,
     set_custom_default_matcher,
-    add_oauth_response_sanitizer
+    add_oauth_response_sanitizer,
 )


@@ -60,15 +60,14 @@ def add_sanitizers(test_proxy, environment_variables):
         ENV_CLIENT_SECRET: TEST_ID,
         ENV_DCE: TEST_DCE,
         ENV_STREAM_NAME: TEST_STREAM_NAME,
-        ENV_DCR_ID: TEST_ID
+        ENV_DCR_ID: TEST_ID,
     }
     environment_variables.sanitize_batch(sanitization_mapping)
     set_custom_default_matcher(
         compare_bodies=False, excluded_headers="Authorization,Content-Length,x-ms-client-request-id,x-ms-request-id"
     )
     add_general_regex_sanitizer(
-        value="fakeresource",
-        regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)"
+        value="fakeresource", regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)"
     )
     add_body_key_sanitizer(json_path="access_token", value="fakekey")
     add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
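
The endpoint sanitizer's lookaround regex can be exercised directly; a quick sketch with a hypothetical endpoint host:

# Quick sketch of the endpoint sanitizer regex from the hunk above.
import re

pattern = r"(?<=\/\/)[a-z-]+(?=\.westus2-1\.ingest\.monitor\.azure\.com)"
url = "https://my-dce.westus2-1.ingest.monitor.azure.com"  # hypothetical host
print(re.sub(pattern, "fakeresource", url))
# -> https://fakeresource.westus2-1.ingest.monitor.azure.com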
@@ -80,19 +79,16 @@ def monitor_info(environment_variables):
     yield {
         "stream_name": environment_variables.get(ENV_STREAM_NAME),
         "dce": environment_variables.get(ENV_DCE),
-        "dcr_id": environment_variables.get(ENV_DCR_ID)
+        "dcr_id": environment_variables.get(ENV_DCR_ID),
     }


 @pytest.fixture(scope="session")
 def large_data():
     logs = []
-    content = "a" * (1024 * 100) # 100 KiB string
+    content = "a" * (1024 * 100)  # 100 KiB string

     # Ensure total size is > 2 MiB data
     for i in range(24):
-        logs.append({
-            "Time": datetime.now().isoformat(),
-            "AdditionalContext": content
-        })
+        logs.append({"Time": datetime.now().isoformat(), "AdditionalContext": content})
     return logs
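
The fixture's sizing comment checks out: 24 entries of roughly 100 KiB each come to about 2.34 MiB before JSON overhead. A quick sketch:

# Quick check of the large_data fixture's sizing comment.
content_bytes = 1024 * 100          # 100 KiB of "a" per log entry
total_bytes = 24 * content_bytes    # 2,457,600 bytes
print(total_bytes / (1024 * 1024))  # ~2.34 MiB, i.e. above the 2 MiB target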
@@ -1,8 +1,8 @@
-#-------------------------------------------------------------------------
+# -------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 from datetime import datetime
 import json
 import random
@@ -17,8 +17,9 @@

 ALPHANUMERIC_CHARACTERS = string.ascii_letters + string.digits

+
 def _get_random_string(length: int):
-    return ''.join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length))
+    return "".join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length))


 def _get_repeating_string(length: int):
@@ -37,13 +38,9 @@ def __init__(self, arguments):
         self.async_credential = AsyncDefaultAzureCredential()

         # Create clients
-        self.client = LogsIngestionClient(
-            endpoint=self.data_collection_endpoint,
-            credential=self.credential
-        )
+        self.client = LogsIngestionClient(endpoint=self.data_collection_endpoint, credential=self.credential)
         self.async_client = AsyncLogsIngestionClient(
-            endpoint=self.data_collection_endpoint,
-            credential=self.async_credential
+            endpoint=self.data_collection_endpoint, credential=self.async_credential
         )

     async def close(self):
@@ -58,36 +55,43 @@ async def setup(self):
         # Create log entries to upload
         self.logs = []
         for i in range(self.args.num_logs):
-            content = _get_random_string(self.args.log_content_length) if self.args.random_log_content \
-                else _get_repeating_string(self.args.log_content_length)
-            self.logs.append({
-                "Time": datetime.now().isoformat(),
-                "Computer": f"Computer {i}",
-                "AdditionalContext": content
-            })
-        print(f'{len(json.dumps(self.logs))} bytes of logs to be uploaded.')
+            content = (
+                _get_random_string(self.args.log_content_length)
+                if self.args.random_log_content
+                else _get_repeating_string(self.args.log_content_length)
+            )
+            self.logs.append(
+                {"Time": datetime.now().isoformat(), "Computer": f"Computer {i}", "AdditionalContext": content}
+            )
+        print(f"{len(json.dumps(self.logs))} bytes of logs to be uploaded.")

     @staticmethod
     def add_arguments(parser):
         super(UploadLogsTest, UploadLogsTest).add_arguments(parser)
-        parser.add_argument("-n", "--num-logs", nargs="?", type=int,
-            help="Number of logs to be uploaded. Defaults to 100", default=100)
-        parser.add_argument("-l", "--log-content-length", nargs="?", type=int,
-            help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20", default=20)
-        parser.add_argument("-r", "--random-log-content", action="store_true",
+        parser.add_argument(
+            "-n", "--num-logs", nargs="?", type=int, help="Number of logs to be uploaded. Defaults to 100", default=100
+        )
+        parser.add_argument(
+            "-l",
+            "--log-content-length",
+            nargs="?",
+            type=int,
+            help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20",
+            default=20,
+        )
+        parser.add_argument(
+            "-r",
+            "--random-log-content",
+            action="store_true",
             help="Whether to use a random alphanumeric string for each 'AdditionalContext' value. "
-            "If False, uses a repeating 'a' character. Defaults to False", default=False)
+            "If False, uses a repeating 'a' character. Defaults to False",
+            default=False,
+        )

     def run_sync(self):
-        self.client.upload(
-            rule_id=self.data_collection_rule_id,
-            stream_name=self.stream_name,
-            logs=self.logs
-        )
+        self.client.upload(rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs)

     async def run_async(self):
         await self.async_client.upload(
-            rule_id=self.data_collection_rule_id,
-            stream_name=self.stream_name,
-            logs=self.logs
+            rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs
         )