Enable lint on CI and update deps (#2067)
* Fix black and isort
* Change bootstrap_gen to use a list instead of a dict
* Bunch of updates
* Fix build
* Fix lint
* Fix docs
* Fix lint
* More fixes
* Fix lint
* Fix stupid mistake

---------

Co-authored-by: Christian Hartung <christian.hartung@olist.com>
parent 9afaf26b3a
commit 5888d4ef95
.pylintrc (13 lines changed)
@@ -3,7 +3,7 @@
 # A comma-separated list of package or module names from where C extensions may
 # be loaded. Extensions are loading into the active Python interpreter and may
 # run arbitrary code.
-extension-pkg-whitelist=
+extension-pkg-whitelist=cassandra

 # Add list of files or directories to be excluded. They should be base names, not
 # paths.
@@ -29,7 +29,7 @@ limit-inference-results=100

 # List of plugins (as comma separated values of python modules names) to load,
 # usually to register additional checkers.
-load-plugins=
+load-plugins=pylint.extensions.no_self_use

 # Pickle collected data for later comparisons.
 persistent=yes
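
The no-self-use check was dropped from pylint's core checkers around pylint 2.14 and now ships as the optional pylint.extensions.no_self_use plugin, which is why it has to be listed under load-plugins. A minimal sketch of the kind of method it flags (names are illustrative):

    class Greeter:
        def greeting(self):
            # 'self' is never used, so the no_self_use extension suggests
            # turning this into a plain function or a @staticmethod.
            return "hello"
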
@@ -69,7 +69,6 @@ disable=missing-docstring,
 duplicate-code,
 ungrouped-imports,  # Leave this up to isort
 wrong-import-order,  # Leave this up to isort
-bad-continuation,  # Leave this up to black
 line-too-long,  # Leave this up to black
 exec-used,
 super-with-arguments,  # temp-pylint-upgrade
@@ -81,6 +80,7 @@ disable=missing-docstring,
 invalid-overridden-method,  # temp-pylint-upgrade
 missing-module-docstring,  # temp-pylint-upgrade
 import-error,  # needed as a workaround as reported here: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/290
+cyclic-import,

 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
@@ -268,13 +268,6 @@ max-line-length=79
 # Maximum number of lines in a module.
 max-module-lines=1000

-# List of optional constructs for which whitespace checking is disabled. `dict-
-# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
-# `trailing-comma` allows a space between comma and closing bracket: (a, ).
-# `empty-line` allows space-only lines.
-no-space-check=trailing-comma,
-               dict-separator
-
 # Allow the body of a class to be on the same line as the declaration if body
 # contains single statement.
 single-line-class-stmt=no
@@ -1,20 +1,19 @@
-pylint==2.12.2
+pylint==3.0.2
-flake8~=3.7
+flake8==6.1.0
-isort~=5.6
+isort==5.12.0
-black>=22.1.0
+black==22.3.0
-httpretty~=1.0
+httpretty==1.1.4
-mypy==0.790
+mypy==0.931
-sphinx
+sphinx==7.1.2
-sphinx-rtd-theme~=0.4
+sphinx-rtd-theme==2.0.0rc4
-sphinx-autodoc-typehints
+sphinx-autodoc-typehints==1.25.2
-pytest!=5.2.3
+pytest==7.1.3
-pytest-cov>=2.8
+pytest-cov==4.1.0
-readme-renderer~=24.0
+readme-renderer==42.0
 bleach==4.1.0  # transient dependency for readme-renderer
-grpcio-tools==1.29.0
-mypy-protobuf>=1.23
 protobuf~=3.13
 markupsafe>=2.0.1
 codespell==2.1.0
 requests==2.31.0
 ruamel.yaml==0.17.21
+flaky==3.7.0
@@ -1,6 +1,6 @@
-sphinx==4.5.0
+sphinx==7.1.2
-sphinx-rtd-theme~=0.4
+sphinx-rtd-theme==2.0.0rc4
-sphinx-autodoc-typehints
+sphinx-autodoc-typehints==1.25.2

 # Need to install the api/sdk in the venv for autodoc. Modifying sys.path
 # doesn't work for pkg_resources.
@@ -45,11 +45,8 @@ remoulade>=0.50
 sqlalchemy>=1.0
 tornado>=5.1.1
 tortoise-orm>=0.17.0
-ddtrace>=0.34.0
 httpx>=0.18.0

 # indirect dependency pins
 markupsafe==2.0.1
 itsdangerous==2.0.1
-
-docutils==0.16
@@ -54,6 +54,7 @@ packages=
 [lintroots]
 extraroots=examples/*,scripts/
 subglob=*.py,tests/,test/,src/*,examples/*
+ignore=sklearn

 [testroots]
 extraroots=examples/*,tests/
@@ -3,6 +3,8 @@ import random
 import pytest

 import opentelemetry.test.metrictestutil as metric_util
+
+# pylint: disable=no-name-in-module
 from opentelemetry.exporter.prometheus_remote_write import (
     PrometheusRemoteWriteMetricsExporter,
 )
@@ -17,6 +17,7 @@ from unittest.mock import patch

 import pytest

+# pylint: disable=no-name-in-module
 from opentelemetry.exporter.prometheus_remote_write import (
     PrometheusRemoteWriteMetricsExporter,
 )
@@ -14,6 +14,7 @@

 import asyncio
 import contextlib
+import sys
 import typing
 import unittest
 import urllib.parse
@@ -116,6 +117,11 @@ class TestAioHttpIntegration(TestBase):
                status_code=status_code,
            )

+           url = f"http://{host}:{port}/test-path?query=param#foobar"
+           # if python version is < 3.8, then the url will be
+           if sys.version_info[1] < 8:
+               url = f"http://{host}:{port}/test-path#foobar"
+
            self.assert_spans(
                [
                    (
@@ -123,7 +129,7 @@ class TestAioHttpIntegration(TestBase):
                        (span_status, None),
                        {
                            SpanAttributes.HTTP_METHOD: "GET",
-                           SpanAttributes.HTTP_URL: f"http://{host}:{port}/test-path#foobar",
+                           SpanAttributes.HTTP_URL: url,
                            SpanAttributes.HTTP_STATUS_CODE: int(
                                status_code
                            ),
@@ -136,7 +142,7 @@ class TestAioHttpIntegration(TestBase):

    def test_schema_url(self):
        with self.subTest(status_code=200):
-           host, port = self._http_request(
+           self._http_request(
                trace_config=aiohttp_client.create_trace_config(),
                url="/test-path?query=param#foobar",
                status_code=200,
@@ -156,7 +162,7 @@ class TestAioHttpIntegration(TestBase):
            mock_tracer.start_span.return_value = mock_span
            with mock.patch("opentelemetry.trace.get_tracer"):
                # pylint: disable=W0612
-               host, port = self._http_request(
+               self._http_request(
                    trace_config=aiohttp_client.create_trace_config(),
                    url="/test-path?query=param#foobar",
                )
@@ -13,24 +13,24 @@
 # limitations under the License.

 import urllib
+from timeit import default_timer
+from typing import Dict, List, Tuple, Union

 from aiohttp import web
 from multidict import CIMultiDictProxy
-from timeit import default_timer
-from typing import Tuple, Dict, List, Union

-from opentelemetry import context, trace, metrics
+from opentelemetry import context, metrics, trace
 from opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY
 from opentelemetry.instrumentation.aiohttp_server.package import _instruments
 from opentelemetry.instrumentation.aiohttp_server.version import __version__
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.utils import http_status_to_status_code
-from opentelemetry.propagators.textmap import Getter
 from opentelemetry.propagate import extract
-from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.propagators.textmap import Getter
 from opentelemetry.semconv.metrics import MetricInstruments
+from opentelemetry.semconv.trace import SpanAttributes
 from opentelemetry.trace.status import Status, StatusCode
-from opentelemetry.util.http import get_excluded_urls
-from opentelemetry.util.http import remove_url_credentials
+from opentelemetry.util.http import get_excluded_urls, remove_url_credentials

 _duration_attrs = [
     SpanAttributes.HTTP_METHOD,
@@ -127,7 +127,7 @@ def collect_request_attributes(request: web.Request) -> Dict:
     result[SpanAttributes.HTTP_METHOD] = http_method

     http_host_value_list = (
-        [request.host] if type(request.host) != list else request.host
+        [request.host] if not isinstance(request.host, list) else request.host
     )
     if http_host_value_list:
         result[SpanAttributes.HTTP_SERVER_NAME] = ",".join(
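
isinstance() is what pylint's unidiomatic-typecheck warning steers toward; unlike an exact type() comparison it also accepts subclasses. A minimal, self-contained sketch of the difference (the helper name is illustrative):

    def as_host_list(host):
        # Exact type comparison (type(host) != list) is flagged by pylint as
        # unidiomatic-typecheck, and it would also wrap list subclasses again.
        # isinstance() matches list and any of its subclasses.
        if not isinstance(host, list):
            return [host]
        return host
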
@@ -12,25 +12,42 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from enum import Enum
+from http import HTTPStatus
+
+import aiohttp
 import pytest
 import pytest_asyncio
-import aiohttp
-from http import HTTPStatus
-from .utils import HTTPMethod

 from opentelemetry import trace as trace_api
-from opentelemetry.test.test_base import TestBase
-from opentelemetry.instrumentation.aiohttp_server import AioHttpServerInstrumentor
+from opentelemetry.instrumentation.aiohttp_server import (
+    AioHttpServerInstrumentor,
+)
 from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.test.globals_test import reset_trace_globals
+from opentelemetry.test.test_base import TestBase
 from opentelemetry.util._importlib_metadata import entry_points
-
-from opentelemetry.test.globals_test import (
-    reset_trace_globals,
-)
+
+
+class HTTPMethod(Enum):
+    """HTTP methods and descriptions"""
+
+    def __repr__(self):
+        return f"{self.value}"
+
+    CONNECT = "CONNECT"
+    DELETE = "DELETE"
+    GET = "GET"
+    HEAD = "HEAD"
+    OPTIONS = "OPTIONS"
+    PATCH = "PATCH"
+    POST = "POST"
+    PUT = "PUT"
+    TRACE = "TRACE"


-@pytest.fixture(scope="session")
-def tracer():
+@pytest.fixture(name="tracer", scope="session")
+def fixture_tracer():
     test_base = TestBase()

     tracer_provider, memory_exporter = test_base.create_tracer_provider()
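
With the shared test utils module removed (see the deleted file further down), the HTTPMethod enum now lives directly in the test module. A small sketch of how it behaves, assuming the definition above:

    from enum import Enum

    class HTTPMethod(Enum):
        GET = "GET"

        def __repr__(self):
            return f"{self.value}"

    assert HTTPMethod.GET.value == "GET"   # the value compared against span attributes
    assert repr(HTTPMethod.GET) == "GET"   # __repr__ returns the bare value
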
@@ -47,15 +64,14 @@ async def default_handler(request, status=200):
     return aiohttp.web.Response(status=status)


-@pytest_asyncio.fixture
-async def server_fixture(tracer, aiohttp_server):
+@pytest_asyncio.fixture(name="server_fixture")
+async def fixture_server_fixture(tracer, aiohttp_server):
     _, memory_exporter = tracer

     AioHttpServerInstrumentor().instrument()

     app = aiohttp.web.Application()
-    app.add_routes(
-        [aiohttp.web.get("/test-path", default_handler)])
+    app.add_routes([aiohttp.web.get("/test-path", default_handler)])

     server = await aiohttp_server(app)
     yield server, app
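
Passing name= to the fixture decorator keeps the public fixture name while giving the function a non-colliding name, the usual way to satisfy pylint's redefined-outer-name when a test argument would otherwise shadow the fixture function. A minimal sketch with illustrative names:

    import pytest

    @pytest.fixture(name="client")
    def fixture_client():
        # Tests still request the fixture as "client"; only the function
        # name changes, so it no longer shadows the test parameter below.
        return object()

    def test_uses_client(client):
        assert client is not None
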
@@ -67,26 +83,31 @@ async def server_fixture(tracer, aiohttp_server):

 def test_checking_instrumentor_pkg_installed():

-    (instrumentor_entrypoint,) = entry_points(group="opentelemetry_instrumentor", name="aiohttp-server")
+    (instrumentor_entrypoint,) = entry_points(
+        group="opentelemetry_instrumentor", name="aiohttp-server"
+    )
     instrumentor = instrumentor_entrypoint.load()()
-    assert (isinstance(instrumentor, AioHttpServerInstrumentor))
+    assert isinstance(instrumentor, AioHttpServerInstrumentor)


 @pytest.mark.asyncio
-@pytest.mark.parametrize("url, expected_method, expected_status_code", [
-    ("/test-path", HTTPMethod.GET, HTTPStatus.OK),
-    ("/not-found", HTTPMethod.GET, HTTPStatus.NOT_FOUND)
-])
+@pytest.mark.parametrize(
+    "url, expected_method, expected_status_code",
+    [
+        ("/test-path", HTTPMethod.GET, HTTPStatus.OK),
+        ("/not-found", HTTPMethod.GET, HTTPStatus.NOT_FOUND),
+    ],
+)
 async def test_status_code_instrumentation(
     tracer,
     server_fixture,
     aiohttp_client,
     url,
     expected_method,
-    expected_status_code
+    expected_status_code,
 ):
     _, memory_exporter = tracer
-    server, app = server_fixture
+    server, _ = server_fixture

     assert len(memory_exporter.get_finished_spans()) == 0

@@ -98,8 +119,12 @@ async def test_status_code_instrumentation(
     [span] = memory_exporter.get_finished_spans()

     assert expected_method.value == span.attributes[SpanAttributes.HTTP_METHOD]
-    assert expected_status_code == span.attributes[SpanAttributes.HTTP_STATUS_CODE]
+    assert (
+        expected_status_code
+        == span.attributes[SpanAttributes.HTTP_STATUS_CODE]
+    )

-    assert f"http://{server.host}:{server.port}{url}" == span.attributes[
-        SpanAttributes.HTTP_URL
-    ]
+    assert (
+        f"http://{server.host}:{server.port}{url}"
+        == span.attributes[SpanAttributes.HTTP_URL]
+    )
@@ -1,32 +0,0 @@
-# Copyright 2020, OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class HTTPMethod(Enum):
-    """HTTP methods and descriptions"""
-
-    def __repr__(self):
-        return f"{self.value}"
-
-    CONNECT = 'CONNECT'
-    DELETE = 'DELETE'
-    GET = 'GET'
-    HEAD = 'HEAD'
-    OPTIONS = 'OPTIONS'
-    PATCH = 'PATCH'
-    POST = 'POST'
-    PUT = 'PUT'
-    TRACE = 'TRACE'
@@ -215,6 +215,7 @@ class _PoolContextManager(_ContextManager):


 class _PoolAcquireContextManager(_ContextManager):
+    # pylint: disable=redefined-slots-in-subclass
     __slots__ = ("_coro", "_obj", "_pool")

     def __init__(self, coro, pool):
@@ -710,6 +710,7 @@ class OpenTelemetryMiddleware:
                 pass

         await send(message)
+        # pylint: disable=too-many-boolean-expressions
         if (
             not expecting_trailers
             and message["type"] == "http.response.body"
@@ -227,6 +227,7 @@ async def error_asgi(scope, receive, send):
     await send({"type": "http.response.body", "body": b"*"})


+# pylint: disable=too-many-public-methods
 class TestAsgiApplication(AsgiTestBase):
     def validate_outputs(self, outputs, error=None, modifiers=None):
         # Ensure modifiers is a list
@@ -84,7 +84,7 @@ def _hydrate_span_from_args(connection, query, parameters) -> dict:
             span_attributes[SpanAttributes.NET_PEER_NAME] = addr
             span_attributes[
                 SpanAttributes.NET_TRANSPORT
-            ] = NetTransportValues.UNIX.value
+            ] = NetTransportValues.OTHER.value

     if query is not None:
         span_attributes[SpanAttributes.DB_STATEMENT] = query
@@ -17,10 +17,12 @@ from importlib import import_module
 from typing import Any, Callable, Dict
 from unittest import mock

-from mocks.api_gateway_http_api_event import (
+from tests.mocks.api_gateway_http_api_event import (
     MOCK_LAMBDA_API_GATEWAY_HTTP_API_EVENT,
 )
-from mocks.api_gateway_proxy_event import MOCK_LAMBDA_API_GATEWAY_PROXY_EVENT
+from tests.mocks.api_gateway_proxy_event import (
+    MOCK_LAMBDA_API_GATEWAY_PROXY_EVENT,
+)

 from opentelemetry.environment_variables import OTEL_PROPAGATORS
 from opentelemetry.instrumentation.aws_lambda import (
@@ -103,7 +105,7 @@ class TestAwsLambdaInstrumentor(TestBase):
         super().setUp()
         self.common_env_patch = mock.patch.dict(
             "os.environ",
-            {_HANDLER: "mocks.lambda_function.handler"},
+            {_HANDLER: "tests.mocks.lambda_function.handler"},
         )
         self.common_env_patch.start()

@@ -356,7 +358,7 @@ class TestAwsLambdaInstrumentor(TestBase):
     def test_api_gateway_proxy_event_sets_attributes(self):
         handler_patch = mock.patch.dict(
             "os.environ",
-            {_HANDLER: "mocks.lambda_function.rest_api_handler"},
+            {_HANDLER: "tests.mocks.lambda_function.rest_api_handler"},
         )
         handler_patch.start()

@@ -28,6 +28,7 @@ from opentelemetry.semconv.trace import DbSystemValues, SpanAttributes
 from opentelemetry.trace.span import Span
 from opentelemetry.util.types import AttributeValue

+# pylint: disable=invalid-name
 _AttributePathT = Union[str, Tuple[str]]

@@ -99,13 +99,13 @@ class _OpInvoke(_LambdaOperation):
 # Lambda extension
 ################################################################################

-_OPERATION_MAPPING = {
+_OPERATION_MAPPING: Dict[str, _LambdaOperation] = {
     op.operation_name(): op
     for op in globals().values()
     if inspect.isclass(op)
     and issubclass(op, _LambdaOperation)
     and not inspect.isabstract(op)
-}  # type: Dict[str, _LambdaOperation]
+}


 class _LambdaExtension(_AwsSdkExtension):
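
The trailing "# type:" comment is replaced here by an inline variable annotation, the modern spelling understood by both mypy and linters. A minimal sketch of the two forms (the class below is a stand-in for the real operation types):

    from typing import Dict

    class _Operation:
        @staticmethod
        def operation_name() -> str:
            return "Invoke"

    # Old style: the type lives in a comment after the literal.
    mapping = {_Operation.operation_name(): _Operation}  # type: Dict[str, type]

    # New style: the annotation is part of the assignment itself.
    mapping_annotated: Dict[str, type] = {_Operation.operation_name(): _Operation}
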
@@ -82,7 +82,9 @@ class _OpPublish(_SnsOperation):
         attributes[SpanAttributes.MESSAGING_DESTINATION] = destination_name

         # TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when opentelemetry-semantic-conventions 0.42b0 is released
-        attributes["messaging.destination.name"] = cls._extract_input_arn(call_context)
+        attributes["messaging.destination.name"] = cls._extract_input_arn(
+            call_context
+        )
         call_context.span_name = (
             f"{'phone_number' if is_phone_number else destination_name} send"
         )
@@ -141,13 +143,13 @@ class _OpPublishBatch(_OpPublish):
 # SNS extension
 ################################################################################

-_OPERATION_MAPPING = {
+_OPERATION_MAPPING: Dict[str, _SnsOperation] = {
     op.operation_name(): op
     for op in globals().values()
     if inspect.isclass(op)
     and issubclass(op, _SnsOperation)
     and not inspect.isabstract(op)
-}  # type: Dict[str, _SnsOperation]
+}


 class _SnsExtension(_AwsSdkExtension):
@@ -57,23 +57,21 @@ class _AwsSdkCallContext:
         boto_meta = client.meta
         service_model = boto_meta.service_model

-        self.service = service_model.service_name.lower()  # type: str
-        self.operation = operation  # type: str
-        self.params = params  # type: Dict[str, Any]
+        self.service = service_model.service_name.lower()
+        self.operation = operation
+        self.params = params

         # 'operation' and 'service' are essential for instrumentation.
         # for all other attributes we extract them defensively. All of them should
         # usually exist unless some future botocore version moved things.
-        self.region = self._get_attr(
-            boto_meta, "region_name"
-        )  # type: Optional[str]
-        self.endpoint_url = self._get_attr(
+        self.region: Optional[str] = self._get_attr(boto_meta, "region_name")
+        self.endpoint_url: Optional[str] = self._get_attr(
             boto_meta, "endpoint_url"
-        )  # type: Optional[str]
+        )

-        self.api_version = self._get_attr(
+        self.api_version: Optional[str] = self._get_attr(
             service_model, "api_version"
-        )  # type: Optional[str]
+        )
         # name of the service in proper casing
         self.service_id = str(
             self._get_attr(service_model, "service_id", self.service)
@@ -122,7 +122,7 @@ class TestSnsExtension(TestBase):
             target_arn,
             # TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when
             # opentelemetry-semantic-conventions 0.42b0 is released
-            span.attributes["messaging.destination.name"]
+            span.attributes["messaging.destination.name"],
         )

     @mock_sns
@@ -194,7 +194,7 @@ class TestSnsExtension(TestBase):
             topic_arn,
             # TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when
             # opentelemetry-semantic-conventions 0.42b0 is released
-            span.attributes["messaging.destination.name"]
+            span.attributes["messaging.destination.name"],
         )

         self.assert_injected_span(message1_attrs, span)
@@ -43,9 +43,9 @@ import cassandra.cluster
 from wrapt import wrap_function_wrapper

 from opentelemetry import trace
-from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.cassandra.package import _instruments
 from opentelemetry.instrumentation.cassandra.version import __version__
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.utils import unwrap
 from opentelemetry.semconv.trace import SpanAttributes

@@ -63,6 +63,7 @@ import logging
 from timeit import default_timer
 from typing import Collection, Iterable

+from billiard import VERSION
 from billiard.einfo import ExceptionInfo
 from celery import signals  # pylint: disable=no-name-in-module

@@ -76,8 +77,6 @@ from opentelemetry.propagate import extract, inject
 from opentelemetry.propagators.textmap import Getter
 from opentelemetry.semconv.trace import SpanAttributes
 from opentelemetry.trace.status import Status, StatusCode
-from billiard import VERSION
-

 if VERSION >= (4, 0, 1):
     from billiard.einfo import ExceptionWithTraceback
@@ -15,7 +15,6 @@
 import logging

 from celery import registry  # pylint: disable=no-name-in-module
-from billiard import VERSION

 from opentelemetry.semconv.trace import SpanAttributes

@@ -107,16 +107,15 @@ from opentelemetry import context, propagate, trace
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.utils import unwrap
 from opentelemetry.semconv.trace import MessagingOperationValues
-from opentelemetry.trace import Link, SpanKind, Tracer
+from opentelemetry.trace import Tracer

 from .package import _instruments
 from .utils import (
     KafkaPropertiesExtractor,
-    _end_current_consume_span,
     _create_new_consume_span,
+    _end_current_consume_span,
     _enrich_span,
     _get_span_name,
-    _kafka_getter,
     _kafka_setter,
 )
 from .version import __version__
@@ -2,13 +2,13 @@ from logging import getLogger
 from typing import List, Optional

 from opentelemetry import context, propagate
-from opentelemetry.trace import SpanKind, Link
 from opentelemetry.propagators import textmap
 from opentelemetry.semconv.trace import (
     MessagingDestinationKindValues,
     MessagingOperationValues,
     SpanAttributes,
 )
+from opentelemetry.trace import Link, SpanKind

 _LOG = getLogger(__name__)

@@ -14,13 +14,6 @@

 # pylint: disable=no-name-in-module

-from opentelemetry.semconv.trace import (
-    SpanAttributes,
-    MessagingDestinationKindValues,
-)
-from opentelemetry.test.test_base import TestBase
-from .utils import MockConsumer, MockedMessage
-
 from confluent_kafka import Consumer, Producer

 from opentelemetry.instrumentation.confluent_kafka import (
@@ -32,6 +25,13 @@ from opentelemetry.instrumentation.confluent_kafka.utils import (
     KafkaContextGetter,
     KafkaContextSetter,
 )
+from opentelemetry.semconv.trace import (
+    MessagingDestinationKindValues,
+    SpanAttributes,
+)
+from opentelemetry.test.test_base import TestBase
+
+from .utils import MockConsumer, MockedMessage


 class TestConfluentKafka(TestBase):
@@ -427,14 +427,14 @@ class CursorTracer:
         if args and self._commenter_enabled:
             try:
                 args_list = list(args)
-                commenter_data = dict(
+                commenter_data = {
                     # Psycopg2/framework information
-                    db_driver=f"psycopg2:{self._connect_module.__version__.split(' ')[0]}",
-                    dbapi_threadsafety=self._connect_module.threadsafety,
-                    dbapi_level=self._connect_module.apilevel,
-                    libpq_version=self._connect_module.__libpq_version__,
-                    driver_paramstyle=self._connect_module.paramstyle,
-                )
+                    "db_driver": f"psycopg2:{self._connect_module.__version__.split(' ')[0]}",
+                    "dbapi_threadsafety": self._connect_module.threadsafety,
+                    "dbapi_level": self._connect_module.apilevel,
+                    "libpq_version": self._connect_module.__libpq_version__,
+                    "driver_paramstyle": self._connect_module.paramstyle,
+                }
                 if self._commenter_options.get(
                     "opentelemetry_values", True
                 ):
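
Building the sqlcomment metadata with a dict literal instead of a dict() call follows pylint's use-dict-literal suggestion; the literal also accepts keys that are not valid identifiers. A short sketch, with placeholder values standing in for the real driver metadata:

    # dict() call: flagged by pylint (use-dict-literal).
    commenter_data = dict(
        db_driver="psycopg2:2.9.9",
        dbapi_level="2.0",
    )

    # Equivalent dict literal, the preferred spelling.
    commenter_data = {
        "db_driver": "psycopg2:2.9.9",
        "dbapi_level": "2.0",
    }
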
@@ -172,6 +172,7 @@ class ElasticsearchInstrumentor(BaseInstrumentor):
         )

     def _uninstrument(self, **kwargs):
+        # pylint: disable=no-member
         unwrap(elasticsearch.Transport, "perform_request")

@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint: disable=unexpected-keyword-arg,missing-kwoa,no-value-for-parameter

 import json
 import os
@@ -40,7 +40,6 @@ from opentelemetry.util.http import (
     OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS,
     OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST,
     OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE,
-    OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE,
     OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS,
     get_excluded_urls,
 )
@@ -77,7 +77,7 @@ class OpenTelemetryAioServerInterceptor(
             # we handle in our context wrapper.
             # Here, we're interested in uncaught exceptions.
             # pylint:disable=unidiomatic-typecheck
-            if type(error) != Exception:
+            if type(error) != Exception:  # noqa: E721
                 span.record_exception(error)
                 raise error

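
The exact-type comparison is deliberate: the comments above suggest a bare Exception instance is already handled by the context wrapper, so only other exception types are recorded as uncaught errors. Switching to isinstance() would lose that distinction, which is why the line keeps the check and silences flake8's E721 with a noqa instead. A small sketch of the difference:

    class RpcHandlerError(Exception):
        pass

    error = RpcHandlerError("boom")

    # isinstance() is true for every subclass of Exception ...
    assert isinstance(error, Exception)

    # ... while the exact-type check is only false for a bare Exception(),
    # which is the case the interceptor deliberately skips.
    assert type(error) != Exception  # noqa: E721
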
@@ -101,7 +101,7 @@ class OpenTelemetryAioServerInterceptor(

         except Exception as error:
             # pylint:disable=unidiomatic-typecheck
-            if type(error) != Exception:
+            if type(error) != Exception:  # noqa: E721
                 span.record_exception(error)
                 raise error

@@ -315,7 +315,7 @@ class OpenTelemetryServerInterceptor(grpc.ServerInterceptor):
             # we handle in our context wrapper.
             # Here, we're interested in uncaught exceptions.
             # pylint:disable=unidiomatic-typecheck
-            if type(error) != Exception:
+            if type(error) != Exception:  # noqa: E721
                 span.record_exception(error)
                 raise error

@@ -342,6 +342,6 @@ class OpenTelemetryServerInterceptor(grpc.ServerInterceptor):

         except Exception as error:
             # pylint:disable=unidiomatic-typecheck
-            if type(error) != Exception:
+            if type(error) != Exception:  # noqa: E721
                 span.record_exception(error)
                 raise error
@@ -17,13 +17,14 @@ from typing import Callable, TypeVar

 import grpc

-TCallDetails = TypeVar(
-    "TCallDetails",
+CallDetailsT = TypeVar(
+    "CallDetailsT",
     grpc.HandlerCallDetails,
     grpc.ClientCallDetails,
     grpc.aio.ClientCallDetails,
 )
-Condition = Callable[[TCallDetails], bool]
+# pylint: disable=invalid-name
+Condition = Callable[[CallDetailsT], bool]


 def _full_method(metadata):
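
pylint 3's default naming rule for TypeVars rejects names like TCallDetails while accepting a trailing-T spelling such as CallDetailsT, hence the rename; the module-level Condition alias additionally carries an invalid-name disable because pylint treats module-level assignments as constants. A minimal sketch, assuming pylint's default configuration:

    from typing import Callable, TypeVar

    # Accepted by the default typevar naming rule; "TCallDetails" is not.
    CallDetailsT = TypeVar("CallDetailsT")

    # A callable alias assigned at module level looks like a constant to
    # pylint, so the real module pairs it with a disable comment.
    Condition = Callable[[CallDetailsT], bool]  # pylint: disable=invalid-name
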
@@ -61,7 +62,7 @@ def _split_full_method(metadata):
     return (service, method)


-def all_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
+def all_of(*args: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
     """Returns a filter function that returns True if all filter functions
     assigned matches conditions.

@@ -79,7 +80,7 @@ def all_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
     return filter_fn


-def any_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
+def any_of(*args: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
     """Returns a filter function that returns True if any of filter functions
     assigned matches conditions.

@@ -97,7 +98,7 @@ def any_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
     return filter_fn


-def negate(func: Condition[TCallDetails]) -> Condition[TCallDetails]:
+def negate(func: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
     """Returns a filter function that negate the result of func

     Args:
@@ -113,7 +114,7 @@ def negate(func: Condition[TCallDetails]) -> Condition[TCallDetails]:
     return filter_fn


-def method_name(name: str) -> Condition[TCallDetails]:
+def method_name(name: str) -> Condition[CallDetailsT]:
     """Returns a filter function that return True if
     request's gRPC method name matches name.

@@ -132,7 +133,7 @@ def method_name(name: str) -> Condition[TCallDetails]:
     return filter_fn


-def method_prefix(prefix: str) -> Condition[TCallDetails]:
+def method_prefix(prefix: str) -> Condition[CallDetailsT]:
     """Returns a filter function that return True if
     request's gRPC method name starts with prefix.

@@ -151,7 +152,7 @@ def method_prefix(prefix: str) -> Condition[TCallDetails]:
     return filter_fn


-def full_method_name(name: str) -> Condition[TCallDetails]:
+def full_method_name(name: str) -> Condition[CallDetailsT]:
     """Returns a filter function that return True if
     request's gRPC full method name matches name.

@@ -170,7 +171,7 @@ def full_method_name(name: str) -> Condition[TCallDetails]:
     return filter_fn


-def service_name(name: str) -> Condition[TCallDetails]:
+def service_name(name: str) -> Condition[CallDetailsT]:
     """Returns a filter function that return True if
     request's gRPC service name matches name.

@@ -189,7 +190,7 @@ def service_name(name: str) -> Condition[TCallDetails]:
     return filter_fn


-def service_prefix(prefix: str) -> Condition[TCallDetails]:
+def service_prefix(prefix: str) -> Condition[CallDetailsT]:
     """Returns a filter function that return True if
     request's gRPC service name starts with prefix.

@@ -208,7 +209,7 @@ def service_prefix(prefix: str) -> Condition[TCallDetails]:
     return filter_fn


-def health_check() -> Condition[TCallDetails]:
+def health_check() -> Condition[CallDetailsT]:
     """Returns a Filter that returns true if the request's
     service name is health check defined by gRPC Health Checking Protocol.
     https://github.com/grpc/grpc/blob/master/doc/health-checking.md
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint:disable=cyclic-import

 import grpc
 from tests.protobuf import (  # pylint: disable=no-name-in-module
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint:disable=cyclic-import

 import os
 from unittest import mock
@@ -1,3 +1,18 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# pylint: disable=unnecessary-dunder-call
+
 from unittest import TestCase, mock

 from opentelemetry.instrumentation.kafka.utils import (
@@ -11,6 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint: disable=unnecessary-dunder-call
+
 from logging import getLogger
 from typing import Any, Collection, Dict, Optional

@@ -61,7 +61,7 @@ class MockConnection:
         return MockCursor()

     def get_dsn_parameters(self):  # pylint: disable=no-self-use
-        return dict(dbname="test")
+        return {"dbname": "test"}


 class TestPostgresqlIntegration(TestBase):
@@ -169,7 +169,7 @@ def _get_address_attributes(instance):
         address_attributes[SpanAttributes.NET_PEER_NAME] = instance.server
         address_attributes[
             SpanAttributes.NET_TRANSPORT
-        ] = NetTransportValues.UNIX.value
+        ] = NetTransportValues.OTHER.value

     return address_attributes

@@ -24,14 +24,15 @@ from pymemcache.exceptions import (
     MemcacheUnknownError,
 )

+# pylint: disable=import-error,no-name-in-module
+from tests.utils import MockSocket, _str
+
 from opentelemetry import trace as trace_api
 from opentelemetry.instrumentation.pymemcache import PymemcacheInstrumentor
 from opentelemetry.semconv.trace import SpanAttributes
 from opentelemetry.test.test_base import TestBase
 from opentelemetry.trace import get_tracer

-from .utils import MockSocket, _str
-
 TEST_HOST = "localhost"
 TEST_PORT = 117711

@@ -43,7 +43,7 @@ def _extract_conn_attributes(conn_kwargs):
         attributes[SpanAttributes.NET_PEER_NAME] = conn_kwargs.get("path", "")
         attributes[
             SpanAttributes.NET_TRANSPORT
-        ] = NetTransportValues.UNIX.value
+        ] = NetTransportValues.OTHER.value

     return attributes

@@ -456,7 +456,7 @@ class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
     @staticmethod
     def perform_request(url: str, session: requests.Session = None):
         if session is None:
-            return requests.get(url)
+            return requests.get(url, timeout=5)
         return session.get(url)

     def test_credential_removal(self):
@@ -467,7 +467,7 @@ class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
         self.assertEqual(span.attributes[SpanAttributes.HTTP_URL], self.URL)

     def test_if_headers_equals_none(self):
-        result = requests.get(self.URL, headers=None)
+        result = requests.get(self.URL, headers=None, timeout=5)
         self.assertEqual(result.text, "Hello!")
         self.assert_span()

@@ -501,7 +501,7 @@ class TestRequestsIntergrationMetric(TestBase):

     @staticmethod
     def perform_request(url: str) -> requests.Response:
-        return requests.get(url)
+        return requests.get(url, timeout=5)

     def test_basic_metric_success(self):
         self.perform_request(self.URL)
@@ -38,7 +38,7 @@ class TestURLLib3InstrumentorWithRealSocket(HttpTestBase, TestBase):

     @staticmethod
     def perform_request(url: str) -> requests.Response:
-        return requests.get(url)
+        return requests.get(url, timeout=5)

     def test_basic_http_success(self):
         response = self.perform_request(self.http_url)
@@ -82,6 +82,8 @@ from sklearn.tree import BaseDecisionTree
 from sklearn.utils.metaestimators import _IffHasAttrDescriptor

 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+
+# pylint: disable=no-name-in-module
 from opentelemetry.instrumentation.sklearn.package import _instruments
 from opentelemetry.instrumentation.sklearn.version import __version__
 from opentelemetry.trace import get_tracer
@@ -14,6 +14,7 @@

 from sklearn.ensemble import RandomForestClassifier

+# pylint: disable=no-name-in-module
 from opentelemetry.instrumentation.sklearn import (
     DEFAULT_EXCLUDE_CLASSES,
     DEFAULT_METHODS,
@@ -224,11 +224,11 @@ class EngineTracer:
         for key, value in attrs.items():
             span.set_attribute(key, value)
         if self.enable_commenter:
-            commenter_data = dict(
-                db_driver=conn.engine.driver,
+            commenter_data = {
+                "db_driver": conn.engine.driver,
                 # Driver/framework centric information.
-                db_framework=f"sqlalchemy:{__version__}",
-            )
+                "db_framework": f"sqlalchemy:{__version__}",
+            }

             if self.commenter_options.get("opentelemetry_values", True):
                 commenter_data.update(**_get_opentelemetry_values())
@@ -296,7 +296,9 @@ def _get_attributes_from_cursor(vendor, cursor, attrs):
         is_unix_socket = info.host and info.host.startswith("/")

         if is_unix_socket:
-            attrs[SpanAttributes.NET_TRANSPORT] = NetTransportValues.UNIX.value
+            attrs[
+                SpanAttributes.NET_TRANSPORT
+            ] = NetTransportValues.OTHER.value
             if info.port:
                 # postgresql enforces this pattern on all socket names
                 attrs[SpanAttributes.NET_PEER_NAME] = os.path.join(
@@ -76,9 +76,9 @@ API
 """

 import gc
+import logging
 import os
 import sys
-import logging
 import threading
 from platform import python_implementation
 from typing import Collection, Dict, Iterable, List, Optional
@ -363,7 +363,7 @@ class SystemMetricsInstrumentor(BaseInstrumentor):
         if "process.runtime.gc_count" in self._config:
             if self._python_implementation == "pypy":
                 _logger.warning(
                     "The process.runtime.gc_count metric won't be collected because the interpreter is PyPy"
                 )
             else:
                 self._meter.create_observable_counter(
@ -372,7 +372,6 @@ class SystemMetricsInstrumentor(BaseInstrumentor):
                     description=f"Runtime {self._python_implementation} GC count",
                     unit="bytes",
                 )
-

         if "process.runtime.thread_count" in self._config:
             self._meter.create_observable_up_down_counter(
@ -18,13 +18,12 @@ from collections import namedtuple
 from platform import python_implementation
 from unittest import mock, skipIf

-from opentelemetry.sdk.metrics import MeterProvider
-from opentelemetry.sdk.metrics.export import InMemoryMetricReader
-from opentelemetry.test.test_base import TestBase
-
 from opentelemetry.instrumentation.system_metrics import (
     SystemMetricsInstrumentor,
 )
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export import InMemoryMetricReader
+from opentelemetry.test.test_base import TestBase


 def _mock_netconnection():
@ -120,12 +119,14 @@ class TestSystemMetrics(TestBase):
             f"process.runtime.{self.implementation}.context_switches",
             f"process.runtime.{self.implementation}.cpu.utilization",
         ]

         if self.implementation == "pypy":
             self.assertEqual(len(metric_names), 20)
         else:
             self.assertEqual(len(metric_names), 21)
-            observer_names.append(f"process.runtime.{self.implementation}.gc_count",)
+            observer_names.append(
+                f"process.runtime.{self.implementation}.gc_count",
+            )

         for observer in metric_names:
             self.assertIn(observer, observer_names)
@ -139,7 +140,7 @@ class TestSystemMetrics(TestBase):
             "process.runtime.cpu.utilization": None,
             "process.runtime.context_switches": ["involuntary", "voluntary"],
         }

         if self.implementation != "pypy":
             runtime_config["process.runtime.gc_count"] = None
@ -166,7 +167,9 @@ class TestSystemMetrics(TestBase):
             self.assertEqual(len(metric_names), 5)
         else:
             self.assertEqual(len(metric_names), 6)
-            observer_names.append(f"process.runtime.{self.implementation}.gc_count")
+            observer_names.append(
+                f"process.runtime.{self.implementation}.gc_count"
+            )

         for observer in metric_names:
             self.assertIn(observer, observer_names)
@ -181,9 +184,9 @@ class TestSystemMetrics(TestBase):
         for data_point in metric.data.data_points:
             for expect in expected:
                 if (
                     dict(data_point.attributes)
                     == expect.attributes
                     and metric.name == observer_name
                 ):
                     self.assertEqual(
                         data_point.value,
@ -791,7 +794,9 @@ class TestSystemMetrics(TestBase):
         )

     @mock.patch("gc.get_count")
-    @skipIf(python_implementation().lower() == "pypy", "not supported for pypy")
+    @skipIf(
+        python_implementation().lower() == "pypy", "not supported for pypy"
+    )
     def test_runtime_get_count(self, mock_gc_get_count):
         mock_gc_get_count.configure_mock(**{"return_value": (1, 2, 3)})
@ -239,8 +239,8 @@ def _instrument(
        token = context.attach(
            context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)
        )
+        start_time = default_timer()
        try:
-            start_time = default_timer()
            result = call_wrapped()  # *** PROCEED
        except Exception as exc:  # pylint: disable=W0703
            exception = exc
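For context on moving start_time ahead of the try block above: the duration baseline is now taken even when call_wrapped() raises, so the error path can still compute an elapsed time. A rough sketch of that pattern with illustrative names (not the instrumentation's real helpers):

from timeit import default_timer

def timed_call(func):
    start_time = default_timer()  # captured before try, so always defined
    exception = None
    result = None
    try:
        result = func()
    except Exception as exc:  # pylint: disable=broad-except
        exception = exc
    elapsed_ms = max(round((default_timer() - start_time) * 1000), 0)
    if exception is not None:
        raise exception
    return result, elapsed_ms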
@ -13,14 +13,14 @@
 # limitations under the License.


+from platform import python_implementation
+from sys import version_info
 from timeit import default_timer
 from urllib import request
 from urllib.parse import urlencode
-from pytest import mark
-from platform import python_implementation
-from sys import version_info

 import httpretty
+from pytest import mark

 from opentelemetry.instrumentation.urllib import (  # pylint: disable=no-name-in-module,import-error
     URLLibInstrumentor,
@ -190,22 +190,16 @@ class TestUrllibMetricsInstrumentation(TestBase):

     @mark.skipif(
         python_implementation() == "PyPy" or version_info.minor == 7,
-        reason="Fails randomly in 3.7 and pypy"
+        reason="Fails randomly in 3.7 and pypy",
     )
     def test_metric_uninstrument(self):
         with request.urlopen(self.URL):
             metrics = self.get_sorted_metrics()
             self.assertEqual(len(metrics), 3)

-            self.assertEqual(
-                metrics[0].data.data_points[0].sum, 1
-            )
-            self.assertEqual(
-                metrics[1].data.data_points[0].sum, 0
-            )
-            self.assertEqual(
-                metrics[2].data.data_points[0].sum, 6
-            )
+            self.assertEqual(metrics[0].data.data_points[0].sum, 1)
+            self.assertEqual(metrics[1].data.data_points[0].sum, 0)
+            self.assertEqual(metrics[2].data.data_points[0].sum, 6)

             URLLibInstrumentor().uninstrument()
             with request.urlopen(self.URL):
@ -78,6 +78,7 @@ def create_gen_wsgi(response):

 def error_wsgi(environ, start_response):
     assert isinstance(environ, dict)
+    exc_info = None
     try:
         raise ValueError
     except ValueError:
@ -77,7 +77,7 @@ def _pip_check():
    ) as check_pipe:
        pip_check = check_pipe.communicate()[0].decode()
        pip_check_lower = pip_check.lower()
-        for package_tup in libraries.values():
+        for package_tup in libraries:
            for package in package_tup:
                if package.lower() in pip_check_lower:
                    raise RuntimeError(f"Dependency conflict found: {pip_check}")
@ -102,15 +102,12 @@ def _is_installed(req):


 def _find_installed_libraries():
-    libs = default_instrumentations[:]
-    libs.extend(
-        [
-            v["instrumentation"]
-            for _, v in libraries.items()
-            if _is_installed(v["library"])
-        ]
-    )
-    return libs
+    for lib in default_instrumentations:
+        yield lib
+
+    for lib in libraries:
+        if _is_installed(lib["library"]):
+            yield lib["instrumentation"]


 def _run_requirements():
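For context on the _find_installed_libraries rewrite above: the function now yields requirements lazily instead of returning a prebuilt list, so callers that need a list must materialize it themselves. A self-contained sketch of the consumption pattern, with made-up data and a stubbed _is_installed:

default_instrumentations = ["opentelemetry-instrumentation-dbapi==0.43b0.dev"]
libraries = [
    {
        "library": "flask >= 1.0, < 3.0",
        "instrumentation": "opentelemetry-instrumentation-flask==0.43b0.dev",
    },
]

def _is_installed(req):  # stand-in for the real installed-package check
    return True

def _find_installed_libraries():
    yield from default_instrumentations
    for lib in libraries:
        if _is_installed(lib["library"]):
            yield lib["instrumentation"]

# Iterate lazily, or wrap in list() where the old list behaviour is needed.
for requirement in _find_installed_libraries():
    print(requirement)
requirements = list(_find_installed_libraries())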
@ -15,176 +15,176 @@
 # DO NOT EDIT. THIS FILE WAS AUTOGENERATED FROM INSTRUMENTATION PACKAGES.
 # RUN `python scripts/generate_instrumentation_bootstrap.py` TO REGENERATE.

-libraries = {
+libraries = [
-    "aio_pika": {
+    {
         "library": "aio_pika >= 7.2.0, < 10.0.0",
         "instrumentation": "opentelemetry-instrumentation-aio-pika==0.43b0.dev",
     },
-    "aiohttp": {
+    {
         "library": "aiohttp ~= 3.0",
         "instrumentation": "opentelemetry-instrumentation-aiohttp-client==0.43b0.dev",
     },
-    "aiohttp": {
+    {
         "library": "aiohttp ~= 3.0",
         "instrumentation": "opentelemetry-instrumentation-aiohttp-server==0.43b0.dev",
     },
-    "aiopg": {
+    {
         "library": "aiopg >= 0.13.0, < 2.0.0",
         "instrumentation": "opentelemetry-instrumentation-aiopg==0.43b0.dev",
     },
-    "asgiref": {
+    {
         "library": "asgiref ~= 3.0",
         "instrumentation": "opentelemetry-instrumentation-asgi==0.43b0.dev",
     },
-    "asyncpg": {
+    {
         "library": "asyncpg >= 0.12.0",
         "instrumentation": "opentelemetry-instrumentation-asyncpg==0.43b0.dev",
     },
-    "boto": {
+    {
         "library": "boto~=2.0",
         "instrumentation": "opentelemetry-instrumentation-boto==0.43b0.dev",
     },
-    "boto3": {
+    {
         "library": "boto3 ~= 1.0",
         "instrumentation": "opentelemetry-instrumentation-boto3sqs==0.43b0.dev",
     },
-    "botocore": {
+    {
         "library": "botocore ~= 1.0",
         "instrumentation": "opentelemetry-instrumentation-botocore==0.43b0.dev",
     },
-    "cassandra-driver": {
+    {
         "library": "cassandra-driver ~= 3.25",
         "instrumentation": "opentelemetry-instrumentation-cassandra==0.43b0.dev",
     },
-    "scylla-driver": {
+    {
         "library": "scylla-driver ~= 3.25",
         "instrumentation": "opentelemetry-instrumentation-cassandra==0.43b0.dev",
     },
-    "celery": {
+    {
         "library": "celery >= 4.0, < 6.0",
         "instrumentation": "opentelemetry-instrumentation-celery==0.43b0.dev",
     },
-    "confluent-kafka": {
+    {
         "library": "confluent-kafka >= 1.8.2, <= 2.2.0",
         "instrumentation": "opentelemetry-instrumentation-confluent-kafka==0.43b0.dev",
     },
-    "django": {
+    {
         "library": "django >= 1.10",
         "instrumentation": "opentelemetry-instrumentation-django==0.43b0.dev",
     },
-    "elasticsearch": {
+    {
         "library": "elasticsearch >= 2.0",
         "instrumentation": "opentelemetry-instrumentation-elasticsearch==0.43b0.dev",
     },
-    "falcon": {
+    {
         "library": "falcon >= 1.4.1, < 4.0.0",
         "instrumentation": "opentelemetry-instrumentation-falcon==0.43b0.dev",
     },
-    "fastapi": {
+    {
         "library": "fastapi ~= 0.58",
         "instrumentation": "opentelemetry-instrumentation-fastapi==0.43b0.dev",
     },
-    "flask": {
+    {
         "library": "flask >= 1.0, < 3.0",
         "instrumentation": "opentelemetry-instrumentation-flask==0.43b0.dev",
     },
-    "werkzeug": {
+    {
         "library": "werkzeug < 3.0.0",
         "instrumentation": "opentelemetry-instrumentation-flask==0.43b0.dev",
     },
-    "grpcio": {
+    {
         "library": "grpcio ~= 1.27",
         "instrumentation": "opentelemetry-instrumentation-grpc==0.43b0.dev",
     },
-    "httpx": {
+    {
         "library": "httpx >= 0.18.0",
         "instrumentation": "opentelemetry-instrumentation-httpx==0.43b0.dev",
     },
-    "jinja2": {
+    {
         "library": "jinja2 >= 2.7, < 4.0",
         "instrumentation": "opentelemetry-instrumentation-jinja2==0.43b0.dev",
     },
-    "kafka-python": {
+    {
         "library": "kafka-python >= 2.0",
         "instrumentation": "opentelemetry-instrumentation-kafka-python==0.43b0.dev",
     },
-    "mysql-connector-python": {
+    {
         "library": "mysql-connector-python ~= 8.0",
         "instrumentation": "opentelemetry-instrumentation-mysql==0.43b0.dev",
     },
-    "mysqlclient": {
+    {
         "library": "mysqlclient < 3",
         "instrumentation": "opentelemetry-instrumentation-mysqlclient==0.43b0.dev",
     },
-    "pika": {
+    {
         "library": "pika >= 0.12.0",
         "instrumentation": "opentelemetry-instrumentation-pika==0.43b0.dev",
     },
-    "psycopg2": {
+    {
         "library": "psycopg2 >= 2.7.3.1",
         "instrumentation": "opentelemetry-instrumentation-psycopg2==0.43b0.dev",
     },
-    "pymemcache": {
+    {
         "library": "pymemcache >= 1.3.5, < 5",
         "instrumentation": "opentelemetry-instrumentation-pymemcache==0.43b0.dev",
     },
-    "pymongo": {
+    {
         "library": "pymongo >= 3.1, < 5.0",
         "instrumentation": "opentelemetry-instrumentation-pymongo==0.43b0.dev",
     },
-    "PyMySQL": {
+    {
         "library": "PyMySQL < 2",
         "instrumentation": "opentelemetry-instrumentation-pymysql==0.43b0.dev",
     },
-    "pyramid": {
+    {
         "library": "pyramid >= 1.7",
         "instrumentation": "opentelemetry-instrumentation-pyramid==0.43b0.dev",
     },
-    "redis": {
+    {
         "library": "redis >= 2.6",
         "instrumentation": "opentelemetry-instrumentation-redis==0.43b0.dev",
     },
-    "remoulade": {
+    {
         "library": "remoulade >= 0.50",
         "instrumentation": "opentelemetry-instrumentation-remoulade==0.43b0.dev",
     },
-    "requests": {
+    {
         "library": "requests ~= 2.0",
         "instrumentation": "opentelemetry-instrumentation-requests==0.43b0.dev",
     },
-    "scikit-learn": {
+    {
         "library": "scikit-learn ~= 0.24.0",
         "instrumentation": "opentelemetry-instrumentation-sklearn==0.43b0.dev",
     },
-    "sqlalchemy": {
+    {
         "library": "sqlalchemy",
         "instrumentation": "opentelemetry-instrumentation-sqlalchemy==0.43b0.dev",
     },
-    "starlette": {
+    {
         "library": "starlette ~= 0.13.0",
         "instrumentation": "opentelemetry-instrumentation-starlette==0.43b0.dev",
     },
-    "psutil": {
+    {
         "library": "psutil >= 5",
         "instrumentation": "opentelemetry-instrumentation-system-metrics==0.43b0.dev",
     },
-    "tornado": {
+    {
         "library": "tornado >= 5.1.1",
         "instrumentation": "opentelemetry-instrumentation-tornado==0.43b0.dev",
     },
-    "tortoise-orm": {
+    {
         "library": "tortoise-orm >= 0.17.0",
         "instrumentation": "opentelemetry-instrumentation-tortoiseorm==0.43b0.dev",
     },
-    "pydantic": {
+    {
         "library": "pydantic >= 1.10.2",
         "instrumentation": "opentelemetry-instrumentation-tortoiseorm==0.43b0.dev",
     },
-    "urllib3": {
+    {
         "library": "urllib3 >= 1.0.0, < 3.0.0",
         "instrumentation": "opentelemetry-instrumentation-urllib3==0.43b0.dev",
     },
-}
+]
 default_instrumentations = [
     "opentelemetry-instrumentation-aws-lambda==0.43b0.dev",
     "opentelemetry-instrumentation-dbapi==0.43b0.dev",
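For context on the generated libraries structure above: as a flat list of dicts it can hold several instrumentations for the same importable package (the old dict keyed by package name could not, since duplicate keys such as "aiohttp" collide), and consumers now filter by scanning entries. A small illustration using two of the entries shown above:

libraries = [
    {
        "library": "aiohttp ~= 3.0",
        "instrumentation": "opentelemetry-instrumentation-aiohttp-client==0.43b0.dev",
    },
    {
        "library": "aiohttp ~= 3.0",
        "instrumentation": "opentelemetry-instrumentation-aiohttp-server==0.43b0.dev",
    },
]

# Filtering is a linear scan over entries rather than a dict lookup.
aiohttp_instrumentations = [
    entry["instrumentation"]
    for entry in libraries
    if entry["library"].startswith("aiohttp")
]
assert len(aiohttp_instrumentations) == 2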
@ -206,9 +206,10 @@ class _OpenTelemetrySemanticConventionStability:

     @classmethod
     def _get_opentelemetry_stability_opt_in(
-        type: _OpenTelemetryStabilitySignalType,
+        cls,
+        signal_type: _OpenTelemetryStabilitySignalType,
     ) -> _OpenTelemetryStabilityMode:
         with _OpenTelemetrySemanticConventionStability._lock:
             return _OpenTelemetrySemanticConventionStability._OTEL_SEMCONV_STABILITY_SIGNAL_MAPPING.get(
-                type, _OpenTelemetryStabilityMode.DEFAULT
+                signal_type, _OpenTelemetryStabilityMode.DEFAULT
             )
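For context on the signature fix above: a @classmethod always receives the class object as its first parameter, so the old version was binding the signal type to that slot and shadowing the built-in name type. A minimal, hypothetical illustration of the binding:

class Stability:
    _MAPPING = {"http": "default"}

    @classmethod
    def opt_in(cls, signal_type):
        # cls is the Stability class; signal_type is the caller's argument.
        return cls._MAPPING.get(signal_type, "default")

assert Stability.opt_in("http") == "default"
assert Stability.opt_in("database") == "default"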
@ -36,7 +36,7 @@ class TestBootstrap(TestCase):
     @classmethod
     def setUpClass(cls):
         cls.installed_libraries = sample_packages(
-            [lib["instrumentation"] for lib in libraries.values()], 0.6
+            [lib["instrumentation"] for lib in libraries], 0.6
         )

         # treat 50% of sampled packages as pre-installed
@ -14,8 +14,12 @@

 from os import environ

-from opentelemetry.sdk.resources import ResourceDetector, Resource
-from opentelemetry.semconv.resource import ResourceAttributes, CloudPlatformValues, CloudProviderValues
+from opentelemetry.sdk.resources import Resource, ResourceDetector
+from opentelemetry.semconv.resource import (
+    CloudPlatformValues,
+    CloudProviderValues,
+    ResourceAttributes,
+)

 _AZURE_APP_SERVICE_STAMP_RESOURCE_ATTRIBUTE = "azure.app.service.stamp"
 _REGION_NAME = "REGION_NAME"
@ -36,18 +40,25 @@ _APP_SERVICE_ATTRIBUTE_ENV_VARS = {
     _AZURE_APP_SERVICE_STAMP_RESOURCE_ATTRIBUTE: _WEBSITE_HOME_STAMPNAME,
 }

+
 class AzureAppServiceResourceDetector(ResourceDetector):
     def detect(self) -> Resource:
         attributes = {}
         website_site_name = environ.get(_WEBSITE_SITE_NAME)
         if website_site_name:
             attributes[ResourceAttributes.SERVICE_NAME] = website_site_name
-            attributes[ResourceAttributes.CLOUD_PROVIDER] = CloudProviderValues.AZURE.value
-            attributes[ResourceAttributes.CLOUD_PLATFORM] = CloudPlatformValues.AZURE_APP_SERVICE.value
+            attributes[
+                ResourceAttributes.CLOUD_PROVIDER
+            ] = CloudProviderValues.AZURE.value
+            attributes[
+                ResourceAttributes.CLOUD_PLATFORM
+            ] = CloudPlatformValues.AZURE_APP_SERVICE.value

             azure_resource_uri = _get_azure_resource_uri(website_site_name)
             if azure_resource_uri:
-                attributes[ResourceAttributes.CLOUD_RESOURCE_ID] = azure_resource_uri
+                attributes[
+                    ResourceAttributes.CLOUD_RESOURCE_ID
+                ] = azure_resource_uri
             for (key, env_var) in _APP_SERVICE_ATTRIBUTE_ENV_VARS.items():
                 value = environ.get(env_var)
                 if value:
@ -55,19 +66,16 @@ class AzureAppServiceResourceDetector(ResourceDetector):

         return Resource(attributes)

+
 def _get_azure_resource_uri(website_site_name):
     website_resource_group = environ.get(_WEBSITE_RESOURCE_GROUP)
     website_owner_name = environ.get(_WEBSITE_OWNER_NAME)

     subscription_id = website_owner_name
-    if website_owner_name and '+' in website_owner_name:
-        subscription_id = website_owner_name[0:website_owner_name.index('+')]
+    if website_owner_name and "+" in website_owner_name:
+        subscription_id = website_owner_name[0 : website_owner_name.index("+")]

     if not (website_resource_group and subscription_id):
         return None

-    return "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Web/sites/%s" % (
-        subscription_id,
-        website_resource_group,
-        website_site_name,
-    )
+    return f"/subscriptions/{subscription_id}/resourceGroups/{website_resource_group}/providers/Microsoft.Web/sites/{website_site_name}"
@ -14,18 +14,16 @@

 from json import loads
 from logging import getLogger
-from os import environ
-from urllib.request import Request, urlopen
 from urllib.error import URLError
+from urllib.request import Request, urlopen

-from opentelemetry.sdk.resources import ResourceDetector, Resource
+from opentelemetry.sdk.resources import Resource, ResourceDetector
 from opentelemetry.semconv.resource import (
-    ResourceAttributes,
     CloudPlatformValues,
     CloudProviderValues,
+    ResourceAttributes,
 )

-
 # TODO: Remove when cloud resource id is no longer missing in Resource Attributes
 _AZURE_VM_METADATA_ENDPOINT = "http://169.254.169.254/metadata/instance/compute?api-version=2021-12-13&format=json"
 _AZURE_VM_SCALE_SET_NAME_ATTRIBUTE = "azure.vm.scaleset.name"
@ -67,20 +65,22 @@ class AzureVMResourceDetector(ResourceDetector):


 class _AzureVMMetadataServiceRequestor:
-    def get_azure_vm_metadata(self):
+    def get_azure_vm_metadata(self):  # pylint: disable=no-self-use
         request = Request(_AZURE_VM_METADATA_ENDPOINT)
         request.add_header("Metadata", "True")
         try:
-            response = urlopen(request).read()
-            return loads(response)
+            with urlopen(request).read() as response:
+                return loads(response)
         except URLError:
             # Not on Azure VM
             return None
-        except Exception as e:
+        except Exception as e:  # pylint: disable=broad-except,invalid-name
             _logger.exception("Failed to receive Azure VM metadata: %s", e)
             return None

-    def get_attribute_from_metadata(self, metadata_json, attribute_key):
+    def get_attribute_from_metadata(
+        self, metadata_json, attribute_key
+    ):  # pylint: disable=no-self-use
         ams_value = ""
         if attribute_key == _AZURE_VM_SCALE_SET_NAME_ATTRIBUTE:
             ams_value = metadata_json["vmScaleSetName"]
|
||||||
ams_value = metadata_json["location"]
|
ams_value = metadata_json["location"]
|
||||||
elif attribute_key == ResourceAttributes.CLOUD_RESOURCE_ID:
|
elif attribute_key == ResourceAttributes.CLOUD_RESOURCE_ID:
|
||||||
ams_value = metadata_json["resourceId"]
|
ams_value = metadata_json["resourceId"]
|
||||||
elif (
|
elif attribute_key in (
|
||||||
attribute_key == ResourceAttributes.HOST_ID
|
ResourceAttributes.HOST_ID,
|
||||||
or attribute_key == ResourceAttributes.SERVICE_INSTANCE_ID
|
ResourceAttributes.SERVICE_INSTANCE_ID,
|
||||||
):
|
):
|
||||||
ams_value = metadata_json["vmId"]
|
ams_value = metadata_json["vmId"]
|
||||||
elif attribute_key == ResourceAttributes.HOST_NAME:
|
elif attribute_key == ResourceAttributes.HOST_NAME:
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@
 import unittest
 from unittest.mock import patch

+# pylint: disable=no-name-in-module
 from opentelemetry.resource.detector.azure.app_service import (
     AzureAppServiceResourceDetector,
 )
@ -12,12 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import unittest
-from unittest.mock import patch, Mock
+from unittest.mock import Mock, patch

-from opentelemetry.semconv.resource import ResourceAttributes
-from opentelemetry.resource.detector.azure.vm import (
-    AzureVMResourceDetector,
-)
+# pylint: disable=no-name-in-module
+from opentelemetry.resource.detector.azure.vm import AzureVMResourceDetector

 LINUX_JSON = """
 {
@ -369,10 +367,8 @@ class TestAzureVMResourceDetector(unittest.TestCase):
         mock_urlopen.return_value = mock_open
         mock_open.read.return_value = LINUX_JSON
         attributes = AzureVMResourceDetector().detect().attributes
-        for attribute_key in LINUX_ATTRIBUTES:
-            self.assertEqual(
-                attributes[attribute_key], LINUX_ATTRIBUTES[attribute_key]
-            )
+        for attribute_key, attribute_value in LINUX_ATTRIBUTES.items():
+            self.assertEqual(attributes[attribute_key], attribute_value)

     @patch("opentelemetry.resource.detector.azure.vm.urlopen")
     def test_windows(self, mock_urlopen):
|
||||||
mock_urlopen.return_value = mock_open
|
mock_urlopen.return_value = mock_open
|
||||||
mock_open.read.return_value = WINDOWS_JSON
|
mock_open.read.return_value = WINDOWS_JSON
|
||||||
attributes = AzureVMResourceDetector().detect().attributes
|
attributes = AzureVMResourceDetector().detect().attributes
|
||||||
for attribute_key in WINDOWS_ATTRIBUTES:
|
for attribute_key, attribute_value in LINUX_ATTRIBUTES.items():
|
||||||
self.assertEqual(
|
self.assertEqual(attributes[attribute_key], attribute_value)
|
||||||
attributes[attribute_key], WINDOWS_ATTRIBUTES[attribute_key]
|
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,6 @@ import subprocess
 import sys

 import astor
-import pkg_resources
 from otel_packaging import (
     get_instrumentation_packages,
     root_path,
@ -58,14 +57,12 @@ gen_path = os.path.join(
 def main():
     # pylint: disable=no-member
     default_instrumentations = ast.List(elts=[])
-    libraries = ast.Dict(keys=[], values=[])
+    libraries = ast.List(elts=[])
     for pkg in get_instrumentation_packages():
         if not pkg["instruments"]:
             default_instrumentations.elts.append(ast.Str(pkg["requirement"]))
         for target_pkg in pkg["instruments"]:
-            parsed = pkg_resources.Requirement.parse(target_pkg)
-            libraries.keys.append(ast.Str(parsed.name))
-            libraries.values.append(
+            libraries.elts.append(
                 ast.Dict(
                     keys=[ast.Str("library"), ast.Str("instrumentation")],
                     values=[ast.Str(target_pkg), ast.Str(pkg["requirement"])],
@ -27,7 +27,7 @@ WORKFLOW_FILE = ".github/workflows/test.yml"

 def get_sha(branch):
     url = API_URL + branch
-    response = requests.get(url)
+    response = requests.get(url, timeout=15)
     response.raise_for_status()
     return response.json()["sha"]

@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint:disable=no-name-in-module

 from opentelemetry.sdk.extension.aws.resource._lambda import (
     AwsLambdaResourceDetector,
@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# pylint:disable=no-name-in-module

 from opentelemetry.sdk.extension.aws.trace.aws_xray_id_generator import (
     AwsXRayIdGenerator,
@ -77,8 +77,7 @@ class AwsXRayIdGenerator(IdGenerator):
     def generate_span_id(self) -> int:
         return self.random_id_generator.generate_span_id()

-    @staticmethod
-    def generate_trace_id() -> int:
+    def generate_trace_id(self) -> int:
         trace_time = int(time.time())
         trace_identifier = random.getrandbits(96)
         return (trace_time << 96) + trace_identifier
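For context on generate_trace_id above: an X-Ray trace id packs the epoch-second timestamp into the top bits of a 128-bit value, with 96 random bits below it, so the timestamp can be recovered with a right shift. A quick, purely illustrative arithmetic check:

import random
import time

trace_time = int(time.time())
trace_identifier = random.getrandbits(96)
trace_id = (trace_time << 96) + trace_identifier

# Shifting the 96 random bits back out recovers the timestamp.
assert trace_id >> 96 == trace_time
assert trace_id.bit_length() <= 128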
@ -16,6 +16,7 @@ import datetime
 import time
 import unittest

+# pylint: disable=no-name-in-module
 from opentelemetry.sdk.extension.aws.trace import AwsXRayIdGenerator
 from opentelemetry.trace.span import INVALID_TRACE_ID
27
tox.ini
@ -507,7 +507,7 @@ commands =
    sphinx-build -E -a -W -b html -T . _build/html

[testenv:spellcheck]
-basepython: python3.10
+basepython: python3
recreate = True
deps =
    codespell
@ -516,17 +516,10 @@ commands =
    codespell

[testenv:lint]
-basepython: python3.9
-recreate = False
+basepython: python3
+recreate = True
deps =
-    -c dev-requirements.txt
-    flaky
-    pylint
-    flake8
-    isort
-    black
-    readme_renderer
-    httpretty
+    -r dev-requirements.txt

commands_pre =
    python -m pip install "{env:CORE_REPO}#egg=opentelemetry-api&subdirectory=opentelemetry-api"
@ -551,7 +544,7 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-celery[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pika[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-aio-pika[test]
-    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-sklearn[test]
+    ; python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-sklearn[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-redis[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-remoulade[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-fastapi[test]
@ -570,6 +563,8 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-urllib[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-urllib3[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pymysql[test]
+    # prerequisite: follow the instructions here https://github.com/PyMySQL/mysqlclient#install
+    # for your OS to install the required dependencies
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-mysqlclient[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pymongo[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-elasticsearch[test]
@ -581,7 +576,7 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-aws-lambda[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-system-metrics[test]
    python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-richconsole[test]
-    python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-prometheus-remote-write[test]
+    # requires snappy headers to be available on the system
    python -m pip install -e {toxinidir}/sdk-extension/opentelemetry-sdk-extension-aws[test]
    python -m pip install -e {toxinidir}/resource/opentelemetry-resource-detector-container[test]
    python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-aws-xray[test]
@ -592,7 +587,7 @@ commands =
    python scripts/eachdist.py lint --check-only

[testenv:docker-tests]
-basepython: python3.10
+basepython: python3
deps =
    pip >= 20.3.3
    pytest
@ -601,6 +596,9 @@ deps =
    mysql-connector-python ~= 8.0
    pymongo >= 3.1, < 5.0
    PyMySQL ~= 0.10.1
+    # prerequisite: install libpq-dev (debian) or postgresql-devel (rhel), postgresql (mac)
+    # see https://www.psycopg.org/docs/install.html#build-prerequisites
+    # you might have to install additional packages depending on your OS
    psycopg2 ~= 2.9.5
    aiopg >= 0.13.0, < 1.3.0
    sqlalchemy ~= 1.4
@ -608,6 +606,7 @@ deps =
    celery[pytest] >= 4.0, < 6.0
    protobuf~=3.13
    requests==2.25.0
+    # prerequisite: install unixodbc
    pyodbc~=4.0.30
    flaky==3.7.0
    remoulade>=0.50
@ -78,7 +78,7 @@ def trysetip(conn: http.client.HTTPConnection, loglevel=logging.DEBUG) -> bool:
    state = _getstate()
    if not state:
        return True
-    spanlist = state.get("need_ip")  # type: typing.List[Span]
+    spanlist: typing.List[Span] = state.get("need_ip")
    if not spanlist:
        return True

@ -88,7 +88,7 @@ def trysetip(conn: http.client.HTTPConnection, loglevel=logging.DEBUG) -> bool:

    sock = "<property not accessed>"
    try:
-        sock = conn.sock  # type: typing.Optional[socket.socket]
+        sock: typing.Optional[socket.socket] = conn.sock
        logger.debug("Got socket: %s", sock)
        if sock is None:
            return False
@ -163,7 +163,7 @@ def set_ip_on_next_http_connection(span: Span):
        finally:
            context.detach(token)
    else:
-        spans = state["need_ip"]  # type: typing.List[Span]
+        spans: typing.List[Span] = state["need_ip"]
        spans.append(span)
        try:
            yield