about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/_logging
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two versions of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_logging')
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/__init__.py8
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/chained_identity.py68
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/compliant_logger.py190
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/debug_mode.py161
4 files changed, 427 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/__init__.py
new file mode 100644
index 00000000..d4c6b7a5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/__init__.py
@@ -0,0 +1,8 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from . import debug_mode
+from .chained_identity import START_MSG, STOP_MSG, ChainedIdentity
+
+__all__ = ["debug_mode", "ChainedIdentity", "START_MSG", "STOP_MSG"]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/chained_identity.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/chained_identity.py
new file mode 100644
index 00000000..5c683edc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/chained_identity.py
@@ -0,0 +1,68 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import logging
+import os
+from contextlib import ContextDecorator
+from typing import Any, Optional
+
# Markers logged when a LogScope context is entered / exited.
START_MSG = "[START]"
STOP_MSG = "[STOP]"

# NOTE(review): os.environ.get returns the raw string when the variable is
# set, so ANY non-empty value (including "0" or "false") enables stack
# tracing — confirm this opt-in semantics is intended.
_PRINT_STACK = os.environ.get("_AZUREML_TRACE_STACK", False)
+
+
class ChainedIdentity(object):
    """Mixin that gives objects hierarchical ("chained") loggers.

    Each instance owns a child logger whose dotted name encodes the chain of
    parent loggers it was created under, making log records traceable back
    through the object creation hierarchy.
    """

    DELIM = "#"

    def __init__(self, _ident: Optional[str] = None, _parent_logger: Optional[logging.Logger] = None, **kwargs):
        """Internal class used to improve logging information.

        :param _ident: Identity of the object; defaults to the class name.
        :type _ident: str
        :param _parent_logger: Parent logger, used to maintain creation hierarchy;
            defaults to the "azureml" logger.
        :type _parent_logger: logging.Logger
        """
        if _ident is None:
            _ident = self.__class__.__name__
        self._identity = _ident

        if _parent_logger is None:
            _parent_logger = logging.getLogger("azureml")
        self._logger = _parent_logger.getChild(self._identity)

        try:
            super(ChainedIdentity, self).__init__(**kwargs)
        except TypeError as type_error:
            # Surface which keyword arguments were left unconsumed by the MRO.
            raise TypeError(
                "{}. Found key word arguments: {}.".format(",".join(type_error.args), kwargs.keys())
            ) from type_error

    @property
    def identity(self) -> str:
        """The name under which this object logs."""
        return self._identity

    def _log_context(self, context_name: str) -> Any:
        """Return a LogScope child of this object's logger for *context_name*."""
        return LogScope(_ident=context_name, _parent_logger=self._logger)
+
+
class LogScope(ChainedIdentity, ContextDecorator):
    """Context manager / decorator that logs START/STOP markers around a scope.

    Entering logs START_MSG (plus the current call stack when the
    ``_AZUREML_TRACE_STACK`` environment variable is set); exiting logs any
    exception details followed by STOP_MSG. Exceptions are never suppressed.
    """

    def __enter__(self) -> logging.Logger:
        msg = START_MSG
        if _PRINT_STACK:
            import io
            import traceback

            stackstr = io.StringIO()
            traceback.print_stack(file=stackstr)
            msg = "{}\n{}".format(msg, stackstr.getvalue())
        self._logger.debug(msg)
        return self._logger

    def __exit__(self, etype, value, tb) -> None:
        if value is not None:
            import traceback

            # Render the traceback as text; logging the raw traceback object
            # (as the original did) only prints its repr, e.g.
            # "<traceback object at 0x...>", which is useless in a debug log.
            self._logger.debug(
                "Error {0}: {1}\n{2}".format(etype, value, "".join(traceback.format_tb(tb)))
            )
        self._logger.debug(STOP_MSG)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/compliant_logger.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/compliant_logger.py
new file mode 100644
index 00000000..bd797790
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/compliant_logger.py
@@ -0,0 +1,190 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+""" This is logger utility which will work with allowed logged filter AML policy
+    https://github.com/Azure/azure-policy/blob/master/built-in-policies/policyDefinitions/Machine%20Learning/AllowedLogFilter_EnforceSetting.json
+    You have to define the same "logFilters" while initializing the logger using  "enable_compliant_logging" method
+    e.g. 
+        log filters: ["^SystemLog:.*$"]
+        initialize : enable_compliant_logging(format_key="prefix",
+                                format_key_value="SystemLog",
+                                format=f"%(prefix)s{logging.BASIC_FORMAT}")
+    By default log message will not compliant e.g. not modified
+"""
+
+import logging
+import sys
+from datetime import datetime
+from enum import Enum
+from threading import Lock
+from typing import Optional
+
# Process-wide record-format state, guarded by a lock so concurrent
# initializers cannot interleave partial key/value updates.
_LOCK = Lock()
_FORMAT_KEY = None
_FORMAT_VALUE = None


# pylint: disable=global-statement
def set_format(key_name: str, value: str) -> None:
    """Atomically record the record-attribute name and prefix value used to tag logs."""
    global _FORMAT_KEY, _FORMAT_VALUE
    with _LOCK:
        _FORMAT_KEY = key_name
        _FORMAT_VALUE = value


def get_format_key() -> Optional[str]:
    """Return the record-attribute name set via set_format, or None if unset."""
    return _FORMAT_KEY


def get_format_value() -> Optional[str]:
    """Return the compliant-prefix value set via set_format, or None if unset."""
    return _FORMAT_VALUE


def get_default_logging_format() -> str:
    """Build the default format string: the prefix attribute followed by BASIC_FORMAT."""
    return f"%({get_format_key()})s{logging.BASIC_FORMAT}"
+
+
class DataCategory(Enum):
    """Category of data handled in compliant machine learning.

    Members:
    - PRIVATE: private data; researchers may not view this.
    - PUBLIC: data that researchers may safely view.
    """

    PRIVATE = 1
    PUBLIC = 2
+
+
class CompliantLogger(logging.getLoggerClass()):  # type: ignore
    """
    Subclass of the default logging class with an explicit `is_compliant` parameter
    on all logging methods. It will pass an `extra` param with `format` key
    (value depending on whether `is_compliant` is True or False) to the
    handlers.

    The default value for data `is_compliant` is `False` for all methods.

    Implementation is inspired by:
    https://github.com/python/cpython/blob/3.8/Lib/logging/__init__.py
    """

    def __init__(self, name: str, handlers=None):
        super().__init__(name)  # type: ignore

        # Snapshot the module-level format configuration at construction time.
        self.format_key = get_format_key()
        self.format_value = get_format_value()

        if handlers:
            self.handlers = handlers

        self.start_time = datetime.now()
        self.metric_count = 1
        # number of iterable items that are logged
        self.max_iter_items = 10

    def _log(
        self,
        level,
        msg,
        args=None,
        exc_info=None,
        extra=None,
        stack_info=False,
        stacklevel=1,
        category=DataCategory.PRIVATE,
    ):
        """Tag the record's `extra` dict with the compliant prefix.

        PUBLIC records get the configured prefix value; PRIVATE records (the
        default) get an empty prefix, so they fail any "allowed log" filter
        that requires the compliant marker.
        """
        if category == DataCategory.PUBLIC:
            format_value = self.format_value
        else:
            format_value = ""

        if extra:
            extra.update({self.format_key: format_value})
        else:
            extra = {self.format_key: format_value}

        # `stacklevel` was added to Logger._log in Python 3.8. Compare the
        # full version tuple; the original checked only the minor version
        # (`sys.version_info[1] <= 7`), which is wrong for any major != 3.
        if sys.version_info < (3, 8):
            super(CompliantLogger, self)._log(
                level=level,
                msg=msg,
                args=args,
                exc_info=exc_info,
                extra=extra,
                stack_info=stack_info,
            )
        else:
            super(CompliantLogger, self)._log(
                level=level,
                msg=msg,
                args=args,
                exc_info=exc_info,
                extra=extra,
                stack_info=stack_info,
                stacklevel=stacklevel,  # type: ignore
            )
+
+
# Printed line by line (prefixed with the compliant marker) by
# enable_compliant_logging when the root logger already has handlers.
_logging_basic_config_set_warning = """
********************************************************************************
The root logger already has handlers set! As a result, the behavior of this
library is undefined. If running in Python >= 3.8, this library will attempt to
call logging.basicConfig(force=True), which will remove all existing root
handlers. See https://stackoverflow.com/q/20240464 and
https://github.com/Azure/confidential-ml-utils/issues/33 for more information.
********************************************************************************
"""
+
+
def enable_compliant_logging(
    format_key: str = "prefix",
    format_key_value: str = "SystemLog:",
    **kwargs,
) -> None:
    """
    The default format is `logging.BASIC_FORMAT` (`%(levelname)s:%(name)s:%(message)s`).
    All other kwargs are passed to `logging.basicConfig`. Sets the default
    logger class and root logger to be compliant. This means the format
    string `%(xxxx)` will work.

    :param format_key: key for format
    :type format_key: str
    :param format_key_value: value for format
    :type format_key_value: str

    Set the format using the `format` kwarg.

    If running in Python >= 3.8, will attempt to add `force=True` to the kwargs
    for logging.basicConfig.

    The standard implementation of the logging API is a good reference:
    https://github.com/python/cpython/blob/3.9/Lib/logging/__init__.py
    """
    # Record the key/value first so CompliantLogger instances constructed
    # below pick them up.
    set_format(format_key, format_key_value)

    if "format" not in kwargs:
        kwargs["format"] = get_default_logging_format()

    # Ensure that all loggers created via `logging.getLogger` are instances of
    # the `CompliantLogger` class.
    logging.setLoggerClass(CompliantLogger)

    # Warn (on stderr, each line carrying the compliant prefix so it passes an
    # "allowed log" filter) before basicConfig(force=True) below removes any
    # pre-existing root handlers.
    if len(logging.root.handlers) > 0:
        p = get_format_value()
        for line in _logging_basic_config_set_warning.splitlines():
            print(f"{p}{line}", file=sys.stderr)

    if "force" not in kwargs and sys.version_info >= (3, 8):
        kwargs["force"] = True

    # Replace the root logger — and the manager that hands out named
    # loggers — so even logging paths created before this call route
    # through CompliantLogger. Order matters: basicConfig must run last so
    # it configures the NEW root.
    root = CompliantLogger(logging.root.name, handlers=logging.root.handlers)

    logging.root = root
    logging.Logger.root = root  # type: ignore
    logging.Logger.manager = logging.Manager(root)  # type: ignore

    # https://github.com/kivy/kivy/issues/6733
    logging.basicConfig(**kwargs)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/debug_mode.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/debug_mode.py
new file mode 100644
index 00000000..0107da6d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_logging/debug_mode.py
@@ -0,0 +1,161 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+import http.client
+import logging
+import os
+import sys
+import time
+import traceback
+from collections import namedtuple
+from typing import List, Optional
+
# Default capture file, record format, and logger namespaces for SDK debug logging.
LOG_FILE = os.path.abspath("azureml.log")
LOG_FORMAT = "%(asctime)s|%(name)s|%(levelname)s|%(message)s"
INTERESTING_NAMESPACES = ["azureml", "msrest.http_logger", "urllib2", "azure"]

module_logger = logging.getLogger(__name__)
separator = "\n==================\n"
# Snapshot of one http.client.HTTPConnection: host, port, and whether a
# socket is currently open.
ConnectionInfo = namedtuple("ConnectionInfo", ["host", "port", "hasSocket"])
+
+
def stack_info() -> list:
    """Return one dict per stack frame across all live threads.

    Each entry records the thread id, source file, line number, function
    name, and the stripped source text of the call (None when unavailable).
    """
    frames = []
    for thread_id, frame in sys._current_frames().items():
        for file_name, line_no, func_name, text in traceback.extract_stack(frame):
            frames.append(
                {
                    "ThreadID": thread_id,
                    "File": file_name,
                    "Line": line_no,
                    "Name": func_name,
                    "Call": None if text is None else text.strip(),
                }
            )
    return frames
+
+
+def connection_info(gc_objects: list) -> List[ConnectionInfo]:
+    connections = [obj for obj in gc_objects if isinstance(obj, http.client.HTTPConnection)]
+    return [ConnectionInfo(host=c.host, port=c.port, hasSocket=c.sock is not None) for c in connections]  # disable
+
+
+# pylint: disable=client-incorrect-naming-convention
+class diagnostic_log(object):
+    """Directs debug logs to a specified file.
+
+    :param log_path: A path with log file name. If None, a file named "azureml.log" is
+        created.
+    :type log_path: str
+    :param namespaces: A list of namespaces to capture logs for. If None, the default is "azureml",
+        "msrest.http_logger", "urllib2", and "azure".
+    :type namespaces: builtin.list
+    :param context_name: A name to identify the logging context. If None, the context of the calling
+        stack frame is used.
+    :type context_type: str
+    """
+
+    def __init__(
+        self, log_path: Optional[str] = None, namespaces: Optional[list] = None, context_name: Optional[str] = None
+    ):
+        self._namespaces = INTERESTING_NAMESPACES if namespaces is None else namespaces
+        self._filename = LOG_FILE if log_path is None else log_path
+        self._filename = os.path.abspath(self._filename)
+        self._capturing = False
+        if context_name is None:
+            import inspect
+
+            context_name = inspect.getouterframes(inspect.currentframe(), 2)[1].function
+        self._context_name = context_name
+
+        formatter = logging.Formatter(LOG_FORMAT)
+        formatter.converter = time.gmtime
+
+        file_handler = logging.FileHandler(filename=self._filename, encoding="utf-8")
+        file_handler.setLevel(logging.DEBUG)
+        file_handler.setFormatter(formatter)
+        self._handler = file_handler
+
+    def start_capture(self) -> None:
+        """Start the capture of debug logs."""
+        if self._capturing:
+            module_logger.warning("Debug logs are already enabled at %s", self._filename)
+            return
+
+        print("Debug logs are being sent to {}".format(self._filename))
+        for namespace in self._namespaces:
+            module_logger.debug("Adding [%s] debug logs to this file", namespace)
+            n_logger = logging.getLogger(namespace)
+            n_logger.setLevel(logging.DEBUG)
+            n_logger.addHandler(self._handler)
+            # We do the below for strange environments like Revo + Jupyter
+            # where root handlers appear to already be set.
+            # We don't want to spew to those consoles with DEBUG emissions
+            n_logger.propagate = False
+
+        module_logger.info(
+            "\n\n********** STARTING CAPTURE FOR [%s] **********\n\n",
+            self._context_name,
+        )
+        self._capturing = True
+
+    def stop_capture(self) -> None:
+        """Stop the capture of debug logs."""
+        if not self._capturing:
+            module_logger.warning("Debug logs are already disabled.")
+            return
+
+        module_logger.info(
+            "\n\n********** STOPPING CAPTURE FOR [%s] **********\n\n",
+            self._context_name,
+        )
+        print("Disabling log capture. Resulting file is at {}".format(self._filename))
+
+        for namespace in self._namespaces:
+            module_logger.debug("Removing [%s] debug logs to this file", namespace)
+            n_logger = logging.getLogger(namespace)
+            n_logger.removeHandler(self._handler)
+
+        self._capturing = False
+
+    def __enter__(self) -> None:
+        self.start_capture()
+
+    def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
+        self.stop_capture()
+
+
# Tracks whether debug_sdk() has already installed its file handler.
_debugging_enabled = False


def debug_sdk() -> None:
    """Route DEBUG logs for the interesting namespaces to LOG_FILE.

    Idempotent: a second call only emits a warning. Unlike diagnostic_log,
    this enables capture for the remainder of the process.
    """
    global _debugging_enabled  # pylint: disable=global-statement
    if _debugging_enabled:
        module_logger.warning("Debug logs are already enabled at %s", LOG_FILE)
        return

    log_formatter = logging.Formatter(LOG_FORMAT)
    log_formatter.converter = time.gmtime  # timestamps in UTC

    handler = logging.FileHandler(filename=LOG_FILE, encoding="utf-8")
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(log_formatter)

    module_logger.info("Debug logs are being sent to %s", LOG_FILE)

    for ns in INTERESTING_NAMESPACES:
        module_logger.debug("Adding [%s] debug logs to this file", ns)
        ns_logger = logging.getLogger(ns)
        ns_logger.setLevel(logging.DEBUG)
        ns_logger.addHandler(handler)
        # We do the below for strange environments like Revo + Jupyter
        # where root handlers appear to already be set.
        # We don't want to spew to those consoles with DEBUG emissions
        ns_logger.propagate = False

    _debugging_enabled = True