Fix install of Python 3.10 on GitHub Actions (#1609)
* Fix install of Python 3.10 on GitHub Actions

In PR #1604 the Python version was upgraded to Python 3.10 to fix a
local issue on M1 MacBooks.

The GitHub Actions workflows now exit with the following message for the
docker-tests, spellcheck and lint checks, silently skipping them.

```
lint create: /home/runner/work/opentelemetry-python-contrib/opentelemetry-python-contrib/.tox/lint
SKIPPED: InterpreterNotFound: python3.10
___________________________________ summary ____________________________________
SKIPPED:  lint: InterpreterNotFound: python3.10
  congratulations :)
```

Upgrade the Python version in the GitHub Actions workflow to fix this.

* Fix YAML interpretation of Python 3.10

* Upgrade Docker tests dependencies

Upgrade the asyncpg and psycopg2 packages, as the old versions don't
work on Python 3.10.

This also fixes running these tests on M1 MacBooks.

* Fix linter issues merged into main

They went unnoticed because CI did not fail while the lint task was not
running.

---------

Co-authored-by: Srikanth Chekuri <[email protected]>
tombruijn and srikanthccv authored Feb 3, 2023
1 parent d8788b6 commit 78874df
Showing 50 changed files with 36 additions and 89 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -102,10 +102,10 @@ jobs:
steps:
- name: Checkout Contrib Repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v2
- name: Set up Python 3.9
- name: Set up Python 3.10
uses: actions/setup-python@v2
with:
python-version: 3.9
python-version: "3.10"
- name: Install tox
run: pip install tox==3.27.1
- name: Install libsnappy-dev
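The quotes around "3.10" above are the key part of this hunk: YAML parses a bare 3.10 as the floating-point number 3.1, so the workflow would not request the interpreter version that was intended. A minimal sketch of the parsing behaviour, assuming PyYAML is available purely for illustration:

```python
import yaml  # PyYAML, assumed installed only for this demonstration

# Unquoted, the value is read as a float and the trailing zero is lost.
print(yaml.safe_load("python-version: 3.10"))    # {'python-version': 3.1}

# Quoted, the value stays a string and keeps its exact form.
print(yaml.safe_load('python-version: "3.10"'))  # {'python-version': '3.10'}
```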
@@ -46,7 +46,7 @@

# Callback to gather cpu usage
def get_cpu_usage_callback(observer):
for (number, percent) in enumerate(psutil.cpu_percent(percpu=True)):
for number, percent in enumerate(psutil.cpu_percent(percpu=True)):
labels = {"cpu_number": str(number)}
yield Observation(percent, labels)

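For context on the callback above: in the current opentelemetry-python metrics API, a per-CPU usage callback like this is attached to an observable gauge roughly as sketched below. The meter setup and the instrument name "system.cpu.percent" are illustrative assumptions, not code from this repository:

```python
import psutil
from opentelemetry.metrics import CallbackOptions, Observation, get_meter

meter = get_meter(__name__)


# Callback to gather cpu usage, one Observation per CPU labelled with its index
def get_cpu_usage_callback(options: CallbackOptions):
    for number, percent in enumerate(psutil.cpu_percent(percpu=True)):
        yield Observation(percent, {"cpu_number": str(number)})


# "system.cpu.percent" is an illustrative name only
meter.create_observable_gauge(
    "system.cpu.percent",
    callbacks=[get_cpu_usage_callback],
    unit="%",
    description="Per-CPU usage",
)
```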
@@ -300,7 +300,6 @@ def _sanitize_string(string: str, type_: str) -> str:
return sanitized

def _parse_histogram_data_point(self, data_point, name):

sample_attr_pairs = []

base_attrs = list(data_point.attributes.items())
@@ -341,7 +340,6 @@ def handle_bucket(value, bound=None, name_override=None):
return sample_attr_pairs

def _parse_data_point(self, data_point, name=None):

attrs = tuple(data_point.attributes.items()) + (
("__name__", self._sanitize_string(name, "name")),
)
@@ -56,7 +56,6 @@ def test_regex_invalid(prom_rw):


def test_parse_data_point(prom_rw):

attrs = {"Foo": "Bar", "Baz": 42}
timestamp = 1641946016139533244
value = 242.42
@@ -127,7 +127,6 @@ def get_traced_cursor_proxy(cursor, db_api_integration, *args, **kwargs):

# pylint: disable=abstract-method
class AsyncCursorTracerProxy(AsyncProxyObject):

# pylint: disable=unused-argument
def __init__(self, cursor, *args, **kwargs):
super().__init__(cursor)
@@ -334,7 +334,8 @@ def collect_request_attributes(scope):

def collect_custom_request_headers_attributes(scope):
"""returns custom HTTP request headers to be added into SERVER span as span attributes
Refer specification https:/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers"""
Refer specification https:/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers
"""

sanitize = SanitizeValue(
get_custom_headers(
@@ -359,7 +360,8 @@ def collect_custom_request_headers_attributes(scope):

def collect_custom_response_headers_attributes(message):
"""returns custom HTTP response headers to be added into SERVER span as span attributes
Refer specification https:/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers"""
Refer specification https:/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#http-request-and-response-headers
"""

sanitize = SanitizeValue(
get_custom_headers(
@@ -131,7 +131,6 @@ def _uninstrument(self, **__):
unwrap(asyncpg.Connection, method)

async def _do_execute(self, func, instance, args, kwargs):

exception = None
params = getattr(instance, "_params", {})
name = args[0] if args[0] else params.get("database", "postgresql")
@@ -81,10 +81,7 @@ def custom_event_context_extractor(lambda_event):
from opentelemetry.instrumentation.aws_lambda.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.metrics import (
MeterProvider,
get_meter_provider,
)
from opentelemetry.metrics import MeterProvider, get_meter_provider
from opentelemetry.propagate import get_global_textmap
from opentelemetry.propagators.aws.aws_xray_propagator import (
TRACE_HEADER_KEY,
@@ -282,7 +279,7 @@ def _instrument(
disable_aws_context_propagation: bool = False,
meter_provider: MeterProvider = None,
):
def _instrumented_lambda_handler_call(
def _instrumented_lambda_handler_call( # noqa pylint: disable=too-many-branches
call_wrapped, instance, args, kwargs
):
orig_handler_name = ".".join(
@@ -366,23 +363,21 @@ def _instrumented_lambda_handler_call(
# NOTE: `force_flush` before function quit in case of Lambda freeze.
_tracer_provider.force_flush(flush_timeout)
except Exception: # pylint: disable=broad-except
logger.exception(
f"TracerProvider failed to flush traces"
)
logger.exception("TracerProvider failed to flush traces")
else:
logger.warning("TracerProvider was missing `force_flush` method. This is necessary in case of a Lambda freeze and would exist in the OTel SDK implementation.")
logger.warning(
"TracerProvider was missing `force_flush` method. This is necessary in case of a Lambda freeze and would exist in the OTel SDK implementation."
)

_meter_provider = meter_provider or get_meter_provider()
if hasattr(_meter_provider, "force_flush"):
rem = flush_timeout - (time.time()-now)*1000
rem = flush_timeout - (time.time() - now) * 1000
if rem > 0:
try:
# NOTE: `force_flush` before function quit in case of Lambda freeze.
_meter_provider.force_flush(rem)
except Exception: # pylint: disable=broad-except
logger.exception(
f"MeterProvider failed to flush metrics"
)
logger.exception("MeterProvider failed to flush metrics")
else:
logger.warning(
"MeterProvider was missing `force_flush` method. This is necessary in case of a Lambda freeze and would exist in the OTel SDK implementation."
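The aws-lambda hunks above reformat the flush-before-freeze logic; the idea is to flush traces first and then spend whatever remains of the millisecond timeout budget on the metrics flush. A condensed sketch with simplified names, not the exact instrumentation code:

```python
import logging
import time

logger = logging.getLogger(__name__)


def flush_before_freeze(tracer_provider, meter_provider, flush_timeout_ms):
    # Flush before the handler returns, because Lambda may freeze the sandbox
    # immediately afterwards and buffered telemetry might not be exported.
    start = time.time()

    if hasattr(tracer_provider, "force_flush"):
        try:
            tracer_provider.force_flush(flush_timeout_ms)
        except Exception:  # pylint: disable=broad-except
            logger.exception("TracerProvider failed to flush traces")

    if hasattr(meter_provider, "force_flush"):
        # Spend only what is left of the timeout budget on the metrics flush.
        remaining_ms = flush_timeout_ms - (time.time() - start) * 1000
        if remaining_ms > 0:
            try:
                meter_provider.force_flush(remaining_ms)
            except Exception:  # pylint: disable=broad-except
                logger.exception("MeterProvider failed to flush metrics")
```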
@@ -302,7 +302,6 @@ def custom_event_context_extractor(lambda_event):
test_env_patch.stop()

def test_lambda_no_error_with_invalid_flush_timeout(self):

test_env_patch = mock.patch.dict(
"os.environ",
{
@@ -119,7 +119,6 @@ def _common_request( # pylint: disable=too-many-locals
args,
kwargs,
):

endpoint_name = getattr(instance, "host").split(".")[0]

with self._tracer.start_as_current_span(
@@ -166,7 +165,6 @@ def _common_request( # pylint: disable=too-many-locals
return result

def _patched_query_request(self, original_func, instance, args, kwargs):

return self._common_request(
("operation_name", "params", "path", "verb"),
["operation_name", "params", "path"],
@@ -187,7 +187,6 @@ def test_s3_put(self):

@mock_lambda_deprecated
def test_unpatch(self):

lamb = boto.awslambda.connect_to_region("us-east-2")

BotoInstrumentor().uninstrument()
@@ -120,7 +120,6 @@ def instrument_consumer(consumer: Consumer, tracer_provider=None)


class AutoInstrumentedProducer(Producer):

# This method is deliberately implemented in order to allow wrapt to wrap this function
def produce(
self, topic, value=None, *args, **kwargs
@@ -73,7 +73,6 @@ def _enrich_span(
offset: Optional[int] = None,
operation: Optional[MessagingOperationValues] = None,
):

if not span.is_recording():
return

@@ -471,7 +471,6 @@ def get_traced_cursor_proxy(cursor, db_api_integration, *args, **kwargs):

# pylint: disable=abstract-method
class TracedCursorProxy(wrapt.ObjectProxy):

# pylint: disable=unused-argument
def __init__(self, cursor, *args, **kwargs):
wrapt.ObjectProxy.__init__(self, cursor)
@@ -236,7 +236,6 @@ def test_executemany(self):
)

def test_executemany_comment(self):

connect_module = mock.MagicMock()
connect_module.__version__ = mock.MagicMock()
connect_module.__libpq_version__ = 123
@@ -262,7 +261,6 @@ def test_executemany_comment(self):
)

def test_executemany_flask_integration_comment(self):

connect_module = mock.MagicMock()
connect_module.__version__ = mock.MagicMock()
connect_module.__libpq_version__ = 123
@@ -286,7 +286,6 @@ def instrumentation_dependencies(self) -> Collection[str]:
return _instruments

def _instrument(self, **kwargs):

# FIXME this is probably a pattern that will show up in the rest of the
# ext. Find a better way of implementing this.
if environ.get(OTEL_PYTHON_DJANGO_INSTRUMENT) == "False":
@@ -201,7 +201,6 @@ def wrapper(wrapped, _, args, kwargs):
op_name,
kind=SpanKind.CLIENT,
) as span:

if callable(request_hook):
request_hook(span, method, url, kwargs)

@@ -323,7 +323,6 @@ def test_request_hook(self, request_mock):
request_hook_kwargs_attribute = "request_hook.kwargs"

def request_hook(span, method, url, kwargs):

attributes = {
request_hook_method_attribute: method,
request_hook_url_attribute: url,
@@ -456,7 +456,6 @@ def _teardown_request(exc):


class _InstrumentedFlask(flask.Flask):

_excluded_urls = None
_tracer_provider = None
_request_hook = None
@@ -36,7 +36,6 @@ def tearDown(self):
FlaskInstrumentor().uninstrument()

def test_sqlcommenter_enabled_default(self):

self.app = flask.Flask(__name__)
self.app.route("/sqlcommenter")(self._sqlcommenter_endpoint)
client = Client(self.app, Response)
@@ -221,7 +221,6 @@ def _set_remote_context(self, servicer_context):
def _start_span(
self, handler_call_details, context, set_status_on_exception=False
):

# standard attributes
attributes = {
SpanAttributes.RPC_SYSTEM: "grpc",
@@ -283,7 +282,6 @@ def intercept_service(self, continuation, handler_call_details):

def telemetry_wrapper(behavior, request_streaming, response_streaming):
def telemetry_interceptor(request_or_iterator, context):

# handle streaming responses specially
if response_streaming:
return self._intercept_server_stream(
@@ -327,7 +325,6 @@ def telemetry_interceptor(request_or_iterator, context):
def _intercept_server_stream(
self, behavior, handler_call_details, request_or_iterator, context
):

with self._set_remote_context(context):
with self._start_span(
handler_call_details, context, set_status_on_exception=False
@@ -46,7 +46,6 @@ def ClientStreamingMethod(self, request_iterator, context):

def ServerStreamingMethod(self, request, context):
if request.request_data == "error":

context.abort(
code=grpc.StatusCode.INVALID_ARGUMENT,
details="server stream error",
@@ -205,7 +205,6 @@ async def test_create_two_spans(self):
class TwoSpanServicer(GRPCTestServerServicer):
# pylint:disable=C0103
async def SimpleMethod(self, request, context):

# create another span
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("child") as child:
@@ -217,7 +217,6 @@ def test_create_two_spans(self):
class TwoSpanServicer(GRPCTestServerServicer):
# pylint:disable=C0103
def SimpleMethod(self, request, context):

# create another span
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("child") as child:
@@ -347,7 +346,6 @@ def test_create_two_spans_streaming(self):
class TwoSpanServicer(GRPCTestServerServicer):
# pylint:disable=C0103
def ServerStreamingMethod(self, request, context):

# create another span
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("child") as child:
@@ -425,7 +425,6 @@ async def handle_async_request(


class _InstrumentedClient(httpx.Client):

_tracer_provider = None
_request_hook = None
_response_hook = None
@@ -445,7 +444,6 @@ def __init__(self, *args, **kwargs):


class _InstrumentedAsyncClient(httpx.AsyncClient):

_tracer_provider = None
_request_hook = None
_response_hook = None
@@ -204,7 +204,6 @@ def _wrap_next(
consume_hook: ConsumeHookT,
) -> Callable:
def _traced_next(func, instance, args, kwargs):

record = func(*args, **kwargs)

if record:
@@ -82,7 +82,6 @@ def instrumentation_dependencies(self) -> Collection[str]:
return _instruments

def _instrument(self, **kwargs):

provider = kwargs.get("tracer_provider", None) or get_tracer_provider()
old_factory = logging.getLogRecordFactory()
LoggingInstrumentor._old_factory = old_factory
@@ -24,7 +24,6 @@


class MockCursor:

execute = mock.MagicMock(spec=types.MethodType)
execute.__name__ = "execute"

@@ -47,7 +46,6 @@ def __exit__(self, *args):


class MockConnection:

commit = mock.MagicMock(spec=types.MethodType)
commit.__name__ = "commit"

@@ -127,7 +127,6 @@ def _wrap_cmd(tracer, cmd, wrapped, instance, args, kwargs):


def _get_query_string(arg):

"""Return the query values given the first argument to a pymemcache command.
If there are multiple query values, they are joined together