Add logger caching by name, version, schema_url
sfc-gh-jopel committed Oct 1, 2024
1 parent 8259ec5 commit 18eaa37
Showing 3 changed files with 142 additions and 14 deletions.
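
In effect, the change makes LoggerProvider.get_logger return the same Logger instance for repeated calls with an identical (name, version, schema_url) key, instead of constructing a new Logger each time. A minimal sketch of the intended behavior, not part of the commit, assuming the cache behaves as the tests below describe:

    from opentelemetry.sdk._logs import LoggerProvider

    provider = LoggerProvider()

    # Both calls share the (name, version, schema_url) key, so the second
    # lookup is expected to be served from the cache rather than building a
    # new Logger.
    logger_a = provider.get_logger(
        "my.module", version="0.1", schema_url="https://example.com/schema"
    )
    logger_b = provider.get_logger(
        "my.module", version="0.1", schema_url="https://example.com/schema"
    )
    assert logger_a is logger_b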
38 changes: 38 additions & 0 deletions opentelemetry-sdk/benchmarks/logs/test_benchmark_logs.py
@@ -0,0 +1,38 @@
import logging

import pytest

from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import (
InMemoryLogExporter,
SimpleLogRecordProcessor,
)


def set_up_logging_handler(level):
logger_provider = LoggerProvider()
exporter = InMemoryLogExporter()
processor = SimpleLogRecordProcessor(exporter=exporter)
logger_provider.add_log_record_processor(processor)
handler = LoggingHandler(level=level, logger_provider=logger_provider)
return handler


def create_logger(handler, name):
logger = logging.getLogger(name)
logger.addHandler(handler)
return logger


@pytest.mark.parametrize("num_loggers", [1, 10, 100, 1000, 10000])
def test_simple_get_logger_different_names(benchmark, num_loggers):
handler = set_up_logging_handler(level=logging.DEBUG)
loggers = [
create_logger(handler, str(f"logger_{i}")) for i in range(num_loggers)
]

def benchmark_get_logger():
for i in range(10000):
loggers[i % num_loggers].warning("test message")

benchmark(benchmark_get_logger)
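
The benchmark above emits warnings through a shared LoggingHandler from many differently named stdlib loggers, parametrized over the number of distinct names. For a rough standalone look at the cached lookup itself, one could time get_logger directly with timeit instead of pytest-benchmark (an illustrative sketch, not part of the commit; names and counts are arbitrary):

    import timeit

    from opentelemetry.sdk._logs import LoggerProvider

    provider = LoggerProvider()

    def fetch_loggers():
        # With the cache in place, repeated lookups for the same key should
        # not construct new Logger objects.
        for i in range(1000):
            provider.get_logger(f"logger_{i % 10}")

    print(timeit.timeit(fetch_loggers, number=100))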
44 changes: 30 additions & 14 deletions opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py
@@ -20,8 +20,8 @@
import threading
import traceback
import warnings
from functools import lru_cache
from os import environ
from threading import Lock
from time import time_ns
from typing import Any, Callable, Optional, Tuple, Union # noqa

@@ -582,9 +582,12 @@ def emit(self, record: logging.LogRecord) -> None:

def flush(self) -> None:
"""
Flushes the logging output. Skip flushing if logger is NoOp.
Flushes the logging output. Skip flushing if logging_provider has no force_flush method.
"""
self._logger_provider.force_flush()
if hasattr(self._logger_provider, "force_flush") and callable(
self._logger_provider.force_flush
):
self._logger_provider.force_flush()


class Logger(APILogger):
@@ -641,12 +644,13 @@ def __init__(
self._at_exit_handler = None
if shutdown_on_exit:
self._at_exit_handler = atexit.register(self.shutdown)
self._logger_cache = {}
self._logger_cache_lock = Lock()

@property
def resource(self):
return self._resource

@lru_cache(maxsize=None)
def get_logger(
self,
name: str,
@@ -662,16 +666,28 @@ def get_logger(
schema_url=schema_url,
attributes=attributes,
)
return Logger(
self._resource,
self._multi_log_record_processor,
InstrumentationScope(
name,
version,
schema_url,
attributes,
),
)
key = (name, version, schema_url)
# Fast path if the logger is already in the cache, return it
if key in self._logger_cache:
return self._logger_cache[key]

# Lock to prevent race conditions when registering loggers with the same key
with self._logger_cache_lock:
# Check again in case another thread added the logger while waiting
if key in self._logger_cache:
return self._logger_cache[key]

self._logger_cache[key] = Logger(
self._resource,
self._multi_log_record_processor,
InstrumentationScope(
name,
version,
schema_url,
attributes,
),
)
return self._logger_cache[key]

def add_log_record_processor(
self, log_record_processor: LogRecordProcessor
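
The new get_logger body above is a double-checked cache: an unlocked fast-path lookup, then a second check under _logger_cache_lock before inserting, so concurrent callers with the same key end up sharing one Logger. A stripped-down sketch of the same pattern, with a generic factory standing in for Logger construction (illustrative only, not part of the commit):

    import threading

    class CachingFactory:
        # Minimal double-checked-locking cache mirroring LoggerProvider.get_logger.

        def __init__(self, make_value):
            self._make_value = make_value  # stands in for constructing a Logger
            self._cache = {}
            self._lock = threading.Lock()

        def get(self, key):
            # Fast path: no lock needed once the value is cached.
            if key in self._cache:
                return self._cache[key]
            with self._lock:
                # Re-check: another thread may have inserted the value while
                # this one was waiting for the lock.
                if key in self._cache:
                    return self._cache[key]
                self._cache[key] = self._make_value(key)
                return self._cache[key]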
74 changes: 74 additions & 0 deletions opentelemetry-sdk/tests/logs/test_log_provider_cache.py
@@ -0,0 +1,74 @@
import logging
import unittest

from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import (
InMemoryLogExporter,
SimpleLogRecordProcessor,
)


def set_up_logging_handler(level):
logger_provider = LoggerProvider()
exporter = InMemoryLogExporter()
processor = SimpleLogRecordProcessor(exporter=exporter)
logger_provider.add_log_record_processor(processor)
handler = LoggingHandler(level=level, logger_provider=logger_provider)
return handler, logger_provider


def create_logger(handler, name):
logger = logging.getLogger(name)
logger.addHandler(handler)
return logger


class TestLogProviderCache(unittest.TestCase):

def test_get_logger_single_handler(self):
handler, logger_provider = set_up_logging_handler(level=logging.DEBUG)

logger = create_logger(handler, "test_logger")

logger.warning("test message")

self.assertEqual(1, len(logger_provider._logger_cache))
self.assertTrue(
("test_logger", "", None) in logger_provider._logger_cache
)

rounds = 100
for _ in range(rounds):
logger.warning("test message")

self.assertEqual(1, len(logger_provider._logger_cache))
self.assertTrue(
("test_logger", "", None) in logger_provider._logger_cache
)

def test_get_logger_multiple_loggers(self):
handler, logger_provider = set_up_logging_handler(level=logging.DEBUG)

num_loggers = 10
loggers = [create_logger(handler, str(i)) for i in range(num_loggers)]

for logger in loggers:
logger.warning("test message")

self.assertEqual(num_loggers, len(logger_provider._logger_cache))
print(logger_provider._logger_cache)
for logger in loggers:
self.assertTrue(
(logger.name, "", None) in logger_provider._logger_cache
)

rounds = 100
for _ in range(rounds):
for logger in loggers:
logger.warning("test message")

self.assertEqual(num_loggers, len(logger_provider._logger_cache))
for logger in loggers:
self.assertTrue(
(logger.name, "", None) in logger_provider._logger_cache
)
