make logging lazy format #1067

Merged · 9 commits · Oct 19, 2022
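This change converts the worker's log statements from eager f-string interpolation to the logging module's lazy %-style argument passing: the message template and its arguments are handed to the logger separately, and the string is only rendered if the record is actually emitted. A minimal sketch of the before/after pattern (illustrative values, not code from this repository):

    import logging

    logger = logging.getLogger(__name__)
    count, mem_map_name = 128, 'mem-map-1'  # hypothetical values for illustration

    # Before: the f-string is rendered even when INFO is disabled.
    logger.info(f'Read {count} bytes from memory map {mem_map_name}')

    # After: the template and args are stored on the LogRecord and merged
    # only when a handler actually emits the record.
    logger.info('Read %s bytes from memory map %s', count, mem_map_name)

Keeping the template constant also avoids formatting cost on hot paths when the level is filtered out, and can let log aggregation tools group otherwise identical messages.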
13 changes: 7 additions & 6 deletions azure_functions_worker/bindings/datumdef.py
@@ -133,8 +133,8 @@ def from_rpc_shared_memory(

if ret_val is not None:
logger.info(
- f'Read {count} bytes from memory map {mem_map_name} '
- f'for data type {data_type}')
+ 'Read %s bytes from memory map %s for data type %s', count,
+ mem_map_name, data_type)
return ret_val
return None

@@ -162,8 +162,8 @@ def to_rpc_shared_memory(
)

if shared_mem_meta is None:
- logger.warning('Cannot write to shared memory for type: '
- f'{datum.type}')
+ logger.warning('Cannot write to shared memory for type: %s',
+ datum.type)
return None

shmem = protos.RpcSharedMemory(
@@ -173,8 +173,9 @@
type=data_type)

logger.info(
- f'Wrote {shared_mem_meta.count_bytes} bytes to memory map '
- f'{shared_mem_meta.mem_map_name} for data type {data_type}')
+ 'Wrote %s bytes to memory map %s for data type %s',
+ shared_mem_meta.count_bytes, shared_mem_meta.mem_map_name,
+ data_type)
return shmem


@@ -43,7 +43,7 @@ def open_mem_map(
f'Cannot open memory map. Invalid size {mem_map_size}')
fd = self._open_mem_map_file(mem_map_name)
if fd is None:
- logger.warning(f'Cannot open file: {mem_map_name}')
+ logger.warning('Cannot open file: %s', mem_map_name)
return None
mem_map = mmap.mmap(fd.fileno(), mem_map_size, access=access)
return mem_map
@@ -58,7 +58,7 @@ def create_mem_map(self, mem_map_name: str, mem_map_size: int) \
f'Cannot create memory map. Invalid size {mem_map_size}')
file = self._create_mem_map_file(mem_map_name, mem_map_size)
if file is None:
- logger.warning(f'Cannot create file: {mem_map_name}')
+ logger.warning('Cannot create file: %s', mem_map_name)
return None
mem_map = mmap.mmap(file.fileno(), mem_map_size, mmap.MAP_SHARED,
mmap.PROT_WRITE)
@@ -80,7 +80,7 @@ def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool:
# deletion was unsuccessful.
# These logs can help identify if we may be leaking memory and not
# cleaning up the created memory maps.
- logger.error(f'Cannot delete memory map {mem_map_name} - {e}',
+ logger.error('Cannot delete memory map %s - %s', mem_map_name, e,
exc_info=True)
return False
mem_map.close()
@@ -96,12 +96,15 @@ def _get_allowed_mem_map_dirs(self) -> List[str]:
allowed_mem_map_dirs_str = get_app_setting(setting)
if allowed_mem_map_dirs_str is None:
allowed_mem_map_dirs = consts.UNIX_TEMP_DIRS
- logger.info('Using allowed directories for shared memory: '
- f'{allowed_mem_map_dirs} from App Setting: {setting}')
+ logger.info(
+ 'Using allowed directories for shared memory: %s from App '
+ 'Setting: %s',
+ allowed_mem_map_dirs, setting)
else:
allowed_mem_map_dirs = allowed_mem_map_dirs_str.split(',')
- logger.info('Using default allowed directories for shared memory: '
- f'{allowed_mem_map_dirs}')
+ logger.info(
+ 'Using default allowed directories for shared memory: %s',
+ allowed_mem_map_dirs)
return allowed_mem_map_dirs

def _get_valid_mem_map_dirs(self) -> List[str]:
@@ -121,18 +124,20 @@ def _get_valid_mem_map_dirs(self) -> List[str]:
if os.path.exists(dir_path):
# A valid directory already exists
valid_dirs.append(dir_path)
- logger.debug(f'Found directory {dir_path} to store memory maps')
+ logger.debug('Found directory %s to store memory maps',
+ dir_path)
else:
try:
os.makedirs(dir_path)
valid_dirs.append(dir_path)
except Exception as e:
# We keep trying to check/create others
- logger.warning(f'Cannot create directory {dir_path} to '
- f'store memory maps - {e}', exc_info=True)
+ logger.warning('Cannot create directory %s to '
+ 'store memory maps - %s', dir_path, e,
+ exc_info=True)
if len(valid_dirs) == 0:
- logger.error('No valid directory for memory maps in '
- f'{allowed_dirs}')
+ logger.error('No valid directory for memory maps in %s',
+ allowed_dirs)
return valid_dirs

def _open_mem_map_file(self, mem_map_name: str) -> Optional[BufferedRandom]:
@@ -149,11 +154,13 @@ def _open_mem_map_file(self, mem_map_name: str) -> Optional[BufferedRandom]:
fd = open(file_path, 'r+b')
return fd
except Exception as e:
- logger.error(f'Cannot open file {file_path} - {e}',
+ logger.error('Cannot open file %s - %s', file_path, e,
exc_info=True)
# The memory map was not found in any of the known directories
- logger.error(f'Cannot open memory map {mem_map_name} in any of the '
- f'following directories: {self.valid_dirs}')
+ logger.error(
+ 'Cannot open memory map %s in any of the following directories: '
+ '%s',
+ mem_map_name, self.valid_dirs)
return None

def _create_mem_map_file(self, mem_map_name: str, mem_map_size: int) \
@@ -179,12 +186,13 @@ def _create_mem_map_file(self, mem_map_name: str, mem_map_size: int) \
except Exception as e:
# If the memory map could not be created in this directory, we
# keep trying in other applicable directories.
- logger.warning(f'Cannot create memory map in {file_path} - {e}.'
- ' Trying other directories.', exc_info=True)
+ logger.warning('Cannot create memory map in %s - %s.'
+ ' Trying other directories.', file_path, e,
+ exc_info=True)
# Could not create the memory map in any of the applicable directory
# paths so we fail.
logger.error(
- f'Cannot create memory map {mem_map_name} with size '
- f'{mem_map_size} in any of the following directories: '
- f'{self.valid_dirs}')
+ 'Cannot create memory map %s with size %s in any of the '
+ 'following directories: %s',
+ mem_map_name, mem_map_size, self.valid_dirs)
return None
@@ -34,8 +34,8 @@ def open_mem_map(
return mem_map
except Exception as e:
logger.warning(
- f'Cannot open memory map {mem_map_name} with size '
- f'{mem_map_size} - {e}')
+ 'Cannot open memory map %s with size %s - %s', mem_map_name,
+ mem_map_size, e)
return None

def create_mem_map(self, mem_map_name: str, mem_map_size: int) \
@@ -92,14 +92,14 @@ def put_bytes(self, content: bytes) -> Optional[SharedMemoryMetadata]:
try:
num_bytes_written = shared_mem_map.put_bytes(content)
except Exception as e:
- logger.warning(f'Cannot write {content_length} bytes into shared '
- f'memory {mem_map_name} - {e}')
+ logger.warning('Cannot write %s bytes into shared memory %s - %s',
+ content_length, mem_map_name, e)
shared_mem_map.dispose()
return None
if num_bytes_written != content_length:
logger.error(
- f'Cannot write data into shared memory {mem_map_name} '
- f'({num_bytes_written} != {content_length})')
+ 'Cannot write data into shared memory %s (%s != %s)',
+ mem_map_name, num_bytes_written, content_length)
shared_mem_map.dispose()
return None
self.allocated_mem_maps[mem_map_name] = shared_mem_map
@@ -128,8 +128,8 @@ def get_bytes(self, mem_map_name: str, offset: int, count: int) \
"""
if offset != 0:
logger.error(
- f'Cannot read bytes. Non-zero offset ({offset}) '
- f'not supported.')
+ 'Cannot read bytes. Non-zero offset (%s) not supported.',
+ offset)
return None
shared_mem_map = self._open(mem_map_name, count)
if shared_mem_map is None:
@@ -168,7 +168,8 @@ def free_mem_map(self, mem_map_name: str,
"""
if mem_map_name not in self.allocated_mem_maps:
logger.error(
- f'Cannot find memory map in list of allocations {mem_map_name}')
+ 'Cannot find memory map in list of allocations %s',
+ mem_map_name)
return False
shared_mem_map = self.allocated_mem_maps[mem_map_name]
success = shared_mem_map.dispose(to_delete_backing_resources)
@@ -53,9 +53,9 @@ def put_bytes(self, content: bytes) -> Optional[int]:
content_length_bytes)
if num_content_length_bytes_written != num_content_length_bytes:
logger.error(
- f'Cannot write content size to memory map {self.mem_map_name} '
- f'({num_content_length_bytes_written} != '
- f'{num_content_length_bytes})')
+ 'Cannot write content size to memory map %s (%s != %s)',
+ self.mem_map_name, num_content_length_bytes_written,
+ num_content_length_bytes)
return 0
# Write the content
num_content_bytes_written = self.mem_map.write(content)
35 changes: 18 additions & 17 deletions azure_functions_worker/dispatcher.py
@@ -242,8 +242,8 @@ async def _dispatch_grpc_request(self, request):
# Don't crash on unknown messages. Some of them can be ignored;
# and if something goes really wrong the host can always just
# kill the worker's process.
- logger.error(f'unknown StreamingMessage content type '
- f'{content_type}')
+ logger.error('unknown StreamingMessage content type %s',
+ content_type)
return

resp = await request_handler(request)
@@ -302,8 +302,8 @@ async def _handle__functions_metadata_request(self, request):

if not os.path.exists(function_path):
# Fallback to legacy model
logger.info(f"{SCRIPT_FILE_NAME} does not exist. "
"Switching to host indexing.")
logger.info("%s does not exist. "
"Switching to host indexing.", SCRIPT_FILE_NAME)
return protos.StreamingMessage(
request_id=request.request_id,
function_metadata_response=protos.FunctionMetadataResponse(
@@ -327,8 +327,8 @@ async def _handle__functions_metadata_request(self, request):
indexed_function_logs.append(function_log)

logger.info(
- f'Successfully processed FunctionMetadataRequest for '
- f'functions: {" ".join(indexed_function_logs)}')
+ 'Successfully processed FunctionMetadataRequest for '
+ 'functions: %s', " ".join(indexed_function_logs))

fx_metadata_results = loader.process_indexed_function(
self._functions,
@@ -378,9 +378,10 @@ async def _handle__function_load_request(self, request):
)

logger.info('Successfully processed FunctionLoadRequest, '
- f'request ID: {self.request_id}, '
- f'function ID: {function_id},'
- f'function Name: {function_name}')
+ 'request ID: %s, '
+ 'function ID: %s,'
+ 'function Name: %s', self.request_id, function_id,
+ function_name)

return protos.StreamingMessage(
request_id=self.request_id,
@@ -602,7 +603,7 @@ async def _handle__close_shared_memory_resources_request(self, request):
to_delete_resources)
results[map_name] = success
except Exception as e:
- logger.error(f'Cannot free memory map {map_name} - {e}',
+ logger.error('Cannot free memory map %s - %s', map_name, e,
exc_info=True)
finally:
response = protos.CloseSharedMemoryResourcesResponse(
@@ -653,16 +654,16 @@ def tp_max_workers_validator(value: str) -> bool:
try:
int_value = int(value)
except ValueError:
- logger.warning(f'{PYTHON_THREADPOOL_THREAD_COUNT} must be an '
- 'integer')
+ logger.warning('%s must be an integer',
+ PYTHON_THREADPOOL_THREAD_COUNT)
return False

if int_value < PYTHON_THREADPOOL_THREAD_COUNT_MIN:
- logger.warning(f'{PYTHON_THREADPOOL_THREAD_COUNT} must be set '
- f'to a value between '
- f'{PYTHON_THREADPOOL_THREAD_COUNT_MIN} and '
- 'sys.maxint. Reverting to default value for '
- 'max_workers')
+ logger.warning(
+ '%s must be set to a value between %s and sys.maxint. '
+ 'Reverting to default value for max_workers',
+ PYTHON_THREADPOOL_THREAD_COUNT,
+ PYTHON_THREADPOOL_THREAD_COUNT_MIN)
return False
return True

20 changes: 10 additions & 10 deletions azure_functions_worker/extension.py
@@ -236,24 +236,24 @@ def _try_get_sdk_with_extension_enabled(cls) -> Optional[ModuleType]:
@classmethod
def _info_extension_is_enabled(cls, sdk):
logger.info(
- 'Python Worker Extension is enabled in azure.functions '
- f'({get_sdk_version(sdk)}).'
+ 'Python Worker Extension is enabled in azure.functions (%s).',
+ get_sdk_version(sdk)
)

@classmethod
def _info_discover_extension_list(cls, function_name, sdk):
logger.info(
- f'Python Worker Extension Manager is loading {function_name}, '
- 'current registered extensions: '
- f'{sdk.ExtensionMeta.get_registered_extensions_json()}'
+ 'Python Worker Extension Manager is loading %s, current '
+ 'registered extensions: %s',
+ function_name, sdk.ExtensionMeta.get_registered_extensions_json()
)

@classmethod
def _warn_sdk_not_support_extension(cls, sdk):
logger.warning(
- f'The azure.functions ({get_sdk_version(sdk)}) does not '
- 'support Python worker extensions. If you believe extensions '
- 'are correctly installed, please set the '
- f'{PYTHON_ISOLATE_WORKER_DEPENDENCIES} and '
- f'{PYTHON_ENABLE_WORKER_EXTENSIONS} to "true"'
+ 'The azure.functions (%s) does not support Python worker '
+ 'extensions. If you believe extensions are correctly installed, '
+ 'please set the %s and %s to "true"',
+ get_sdk_version(sdk), PYTHON_ISOLATE_WORKER_DEPENDENCIES,
+ PYTHON_ENABLE_WORKER_EXTENSIONS
)
4 changes: 2 additions & 2 deletions azure_functions_worker/testutils.py
@@ -258,8 +258,8 @@ def _run_test(self, test, *args, **kwargs):
self.host_stdout.seek(last_pos)
self.host_out = self.host_stdout.read()
self.host_stdout_logger.error(
- f'Captured WebHost stdout from {self.host_stdout.name} '
- f':\n{self.host_out}')
+ 'Captured WebHost stdout from %s :\n%s',
+ self.host_stdout.name, self.host_out)
finally:
if test_exception is not None:
raise test_exception
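As a quick way to see the difference the lazy form makes (a standalone sketch using the standard logging module, not part of this PR): with the level disabled, the %-style call skips formatting entirely, while the f-string version still pays for it.

    import logging

    logging.basicConfig(level=logging.WARNING)   # INFO records are dropped
    log = logging.getLogger('lazy-format-demo')  # hypothetical logger name

    class Expensive:
        def __str__(self) -> str:
            print('formatting happened')
            return 'expensive-value'

    # Lazy form: __str__ is never called, because the disabled INFO record
    # is discarded before the message would be formatted.
    log.info('value is %s', Expensive())

    # Eager f-string form: __str__ runs even though nothing is logged.
    log.info(f'value is {Expensive()}')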