Enable lint on CI and update deps (#2067)
* Fix black and isort
* Change bootstrap_gen to use a list instead of a dict
* Bunch of updates
* Fix build
* Fix lint
* Fix docs
* Fix lint
* More fixes
* Fix lint
* Fix a careless mistake

Co-authored-by: Christian Hartung <christian.hartung@olist.com>
parent 9afaf26b3a
commit 5888d4ef95

.pylintrc (13 changed lines)

@@ -3,7 +3,7 @@
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
extension-pkg-whitelist=cassandra

# Add list of files or directories to be excluded. They should be base names, not
# paths.

@@ -29,7 +29,7 @@ limit-inference-results=100

# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
load-plugins=pylint.extensions.no_self_use

# Pickle collected data for later comparisons.
persistent=yes

@@ -69,7 +69,6 @@ disable=missing-docstring,
duplicate-code,
ungrouped-imports, # Leave this up to isort
wrong-import-order, # Leave this up to isort
bad-continuation, # Leave this up to black
line-too-long, # Leave this up to black
exec-used,
super-with-arguments, # temp-pylint-upgrade

@@ -81,6 +80,7 @@ disable=missing-docstring,
invalid-overridden-method, # temp-pylint-upgrade
missing-module-docstring, # temp-pylint-upgrade
import-error, # needed as a workaround as reported here: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/290
cyclic-import,

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option

@@ -268,13 +268,6 @@ max-line-length=79
# Maximum number of lines in a module.
max-module-lines=1000

# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator

# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no

@@ -1,20 +1,19 @@
pylint==2.12.2
flake8~=3.7
isort~=5.6
black>=22.1.0
httpretty~=1.0
mypy==0.790
sphinx
sphinx-rtd-theme~=0.4
sphinx-autodoc-typehints
pytest!=5.2.3
pytest-cov>=2.8
readme-renderer~=24.0
pylint==3.0.2
flake8==6.1.0
isort==5.12.0
black==22.3.0
httpretty==1.1.4
mypy==0.931
sphinx==7.1.2
sphinx-rtd-theme==2.0.0rc4
sphinx-autodoc-typehints==1.25.2
pytest==7.1.3
pytest-cov==4.1.0
readme-renderer==42.0
bleach==4.1.0 # transient dependency for readme-renderer
grpcio-tools==1.29.0
mypy-protobuf>=1.23
protobuf~=3.13
markupsafe>=2.0.1
codespell==2.1.0
requests==2.31.0
ruamel.yaml==0.17.21
flaky==3.7.0

@@ -1,6 +1,6 @@
sphinx==4.5.0
sphinx-rtd-theme~=0.4
sphinx-autodoc-typehints
sphinx==7.1.2
sphinx-rtd-theme==2.0.0rc4
sphinx-autodoc-typehints==1.25.2

# Need to install the api/sdk in the venv for autodoc. Modifying sys.path
# doesn't work for pkg_resources.

@@ -45,11 +45,8 @@ remoulade>=0.50
sqlalchemy>=1.0
tornado>=5.1.1
tortoise-orm>=0.17.0
ddtrace>=0.34.0
httpx>=0.18.0

# indirect dependency pins
markupsafe==2.0.1
itsdangerous==2.0.1

docutils==0.16

@@ -54,6 +54,7 @@ packages=
[lintroots]
extraroots=examples/*,scripts/
subglob=*.py,tests/,test/,src/*,examples/*
ignore=sklearn

[testroots]
extraroots=examples/*,tests/

@@ -3,6 +3,8 @@ import random
import pytest

import opentelemetry.test.metrictestutil as metric_util

# pylint: disable=no-name-in-module
from opentelemetry.exporter.prometheus_remote_write import (
PrometheusRemoteWriteMetricsExporter,
)

@@ -17,6 +17,7 @@ from unittest.mock import patch

import pytest

# pylint: disable=no-name-in-module
from opentelemetry.exporter.prometheus_remote_write import (
PrometheusRemoteWriteMetricsExporter,
)

@@ -14,6 +14,7 @@

import asyncio
import contextlib
import sys
import typing
import unittest
import urllib.parse

@@ -116,6 +117,11 @@ class TestAioHttpIntegration(TestBase):
status_code=status_code,
)

url = f"http://{host}:{port}/test-path?query=param#foobar"
# if python version is < 3.8, then the url will be
if sys.version_info[1] < 8:
url = f"http://{host}:{port}/test-path#foobar"

self.assert_spans(
[
(

@@ -123,7 +129,7 @@ class TestAioHttpIntegration(TestBase):
(span_status, None),
{
SpanAttributes.HTTP_METHOD: "GET",
SpanAttributes.HTTP_URL: f"http://{host}:{port}/test-path#foobar",
SpanAttributes.HTTP_URL: url,
SpanAttributes.HTTP_STATUS_CODE: int(
status_code
),

@@ -136,7 +142,7 @@ class TestAioHttpIntegration(TestBase):

def test_schema_url(self):
with self.subTest(status_code=200):
host, port = self._http_request(
self._http_request(
trace_config=aiohttp_client.create_trace_config(),
url="/test-path?query=param#foobar",
status_code=200,

@@ -156,7 +162,7 @@ class TestAioHttpIntegration(TestBase):
mock_tracer.start_span.return_value = mock_span
with mock.patch("opentelemetry.trace.get_tracer"):
# pylint: disable=W0612
host, port = self._http_request(
self._http_request(
trace_config=aiohttp_client.create_trace_config(),
url="/test-path?query=param#foobar",
)

@@ -13,24 +13,24 @@
# limitations under the License.

import urllib
from timeit import default_timer
from typing import Dict, List, Tuple, Union

from aiohttp import web
from multidict import CIMultiDictProxy
from timeit import default_timer
from typing import Tuple, Dict, List, Union

from opentelemetry import context, trace, metrics
from opentelemetry import context, metrics, trace
from opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY
from opentelemetry.instrumentation.aiohttp_server.package import _instruments
from opentelemetry.instrumentation.aiohttp_server.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import http_status_to_status_code
from opentelemetry.propagators.textmap import Getter
from opentelemetry.propagate import extract
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.propagators.textmap import Getter
from opentelemetry.semconv.metrics import MetricInstruments
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.trace.status import Status, StatusCode
from opentelemetry.util.http import get_excluded_urls
from opentelemetry.util.http import remove_url_credentials
from opentelemetry.util.http import get_excluded_urls, remove_url_credentials

_duration_attrs = [
SpanAttributes.HTTP_METHOD,

@@ -127,7 +127,7 @@ def collect_request_attributes(request: web.Request) -> Dict:
result[SpanAttributes.HTTP_METHOD] = http_method

http_host_value_list = (
[request.host] if type(request.host) != list else request.host
[request.host] if not isinstance(request.host, list) else request.host
)
if http_host_value_list:
result[SpanAttributes.HTTP_SERVER_NAME] = ",".join(

@@ -12,25 +12,42 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from enum import Enum
from http import HTTPStatus

import aiohttp
import pytest
import pytest_asyncio
import aiohttp
from http import HTTPStatus
from .utils import HTTPMethod

from opentelemetry import trace as trace_api
from opentelemetry.test.test_base import TestBase
from opentelemetry.instrumentation.aiohttp_server import AioHttpServerInstrumentor
from opentelemetry.instrumentation.aiohttp_server import (
AioHttpServerInstrumentor,
)
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.test.globals_test import reset_trace_globals
from opentelemetry.test.test_base import TestBase
from opentelemetry.util._importlib_metadata import entry_points

from opentelemetry.test.globals_test import (
reset_trace_globals,
)

class HTTPMethod(Enum):
"""HTTP methods and descriptions"""

def __repr__(self):
return f"{self.value}"

CONNECT = "CONNECT"
DELETE = "DELETE"
GET = "GET"
HEAD = "HEAD"
OPTIONS = "OPTIONS"
PATCH = "PATCH"
POST = "POST"
PUT = "PUT"
TRACE = "TRACE"


@pytest.fixture(scope="session")
def tracer():
@pytest.fixture(name="tracer", scope="session")
def fixture_tracer():
test_base = TestBase()

tracer_provider, memory_exporter = test_base.create_tracer_provider()

@@ -47,15 +64,14 @@ async def default_handler(request, status=200):
return aiohttp.web.Response(status=status)


@pytest_asyncio.fixture
async def server_fixture(tracer, aiohttp_server):
@pytest_asyncio.fixture(name="server_fixture")
async def fixture_server_fixture(tracer, aiohttp_server):
_, memory_exporter = tracer

AioHttpServerInstrumentor().instrument()

app = aiohttp.web.Application()
app.add_routes(
[aiohttp.web.get("/test-path", default_handler)])
app.add_routes([aiohttp.web.get("/test-path", default_handler)])

server = await aiohttp_server(app)
yield server, app

@@ -67,26 +83,31 @@ async def server_fixture(tracer, aiohttp_server):

def test_checking_instrumentor_pkg_installed():

(instrumentor_entrypoint,) = entry_points(group="opentelemetry_instrumentor", name="aiohttp-server")
(instrumentor_entrypoint,) = entry_points(
group="opentelemetry_instrumentor", name="aiohttp-server"
)
instrumentor = instrumentor_entrypoint.load()()
assert (isinstance(instrumentor, AioHttpServerInstrumentor))
assert isinstance(instrumentor, AioHttpServerInstrumentor)


@pytest.mark.asyncio
@pytest.mark.parametrize("url, expected_method, expected_status_code", [
("/test-path", HTTPMethod.GET, HTTPStatus.OK),
("/not-found", HTTPMethod.GET, HTTPStatus.NOT_FOUND)
])
@pytest.mark.parametrize(
"url, expected_method, expected_status_code",
[
("/test-path", HTTPMethod.GET, HTTPStatus.OK),
("/not-found", HTTPMethod.GET, HTTPStatus.NOT_FOUND),
],
)
async def test_status_code_instrumentation(
tracer,
server_fixture,
aiohttp_client,
url,
expected_method,
expected_status_code
expected_status_code,
):
_, memory_exporter = tracer
server, app = server_fixture
server, _ = server_fixture

assert len(memory_exporter.get_finished_spans()) == 0

@@ -98,8 +119,12 @@ async def test_status_code_instrumentation(
[span] = memory_exporter.get_finished_spans()

assert expected_method.value == span.attributes[SpanAttributes.HTTP_METHOD]
assert expected_status_code == span.attributes[SpanAttributes.HTTP_STATUS_CODE]
assert (
expected_status_code
== span.attributes[SpanAttributes.HTTP_STATUS_CODE]
)

assert f"http://{server.host}:{server.port}{url}" == span.attributes[
SpanAttributes.HTTP_URL
]
assert (
f"http://{server.host}:{server.port}{url}"
== span.attributes[SpanAttributes.HTTP_URL]
)

@@ -1,32 +0,0 @@
# Copyright 2020, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from enum import Enum


class HTTPMethod(Enum):
"""HTTP methods and descriptions"""

def __repr__(self):
return f"{self.value}"

CONNECT = 'CONNECT'
DELETE = 'DELETE'
GET = 'GET'
HEAD = 'HEAD'
OPTIONS = 'OPTIONS'
PATCH = 'PATCH'
POST = 'POST'
PUT = 'PUT'
TRACE = 'TRACE'

@@ -215,6 +215,7 @@ class _PoolContextManager(_ContextManager):


class _PoolAcquireContextManager(_ContextManager):
# pylint: disable=redefined-slots-in-subclass
__slots__ = ("_coro", "_obj", "_pool")

def __init__(self, coro, pool):

@@ -710,6 +710,7 @@ class OpenTelemetryMiddleware:
pass

await send(message)
# pylint: disable=too-many-boolean-expressions
if (
not expecting_trailers
and message["type"] == "http.response.body"

@@ -227,6 +227,7 @@ async def error_asgi(scope, receive, send):
await send({"type": "http.response.body", "body": b"*"})


# pylint: disable=too-many-public-methods
class TestAsgiApplication(AsgiTestBase):
def validate_outputs(self, outputs, error=None, modifiers=None):
# Ensure modifiers is a list

@@ -84,7 +84,7 @@ def _hydrate_span_from_args(connection, query, parameters) -> dict:
span_attributes[SpanAttributes.NET_PEER_NAME] = addr
span_attributes[
SpanAttributes.NET_TRANSPORT
] = NetTransportValues.UNIX.value
] = NetTransportValues.OTHER.value

if query is not None:
span_attributes[SpanAttributes.DB_STATEMENT] = query

@ -17,10 +17,12 @@ from importlib import import_module
|
|||
from typing import Any, Callable, Dict
|
||||
from unittest import mock
|
||||
|
||||
from mocks.api_gateway_http_api_event import (
|
||||
from tests.mocks.api_gateway_http_api_event import (
|
||||
MOCK_LAMBDA_API_GATEWAY_HTTP_API_EVENT,
|
||||
)
|
||||
from mocks.api_gateway_proxy_event import MOCK_LAMBDA_API_GATEWAY_PROXY_EVENT
|
||||
from tests.mocks.api_gateway_proxy_event import (
|
||||
MOCK_LAMBDA_API_GATEWAY_PROXY_EVENT,
|
||||
)
|
||||
|
||||
from opentelemetry.environment_variables import OTEL_PROPAGATORS
|
||||
from opentelemetry.instrumentation.aws_lambda import (
|
||||
|
|
@ -103,7 +105,7 @@ class TestAwsLambdaInstrumentor(TestBase):
|
|||
super().setUp()
|
||||
self.common_env_patch = mock.patch.dict(
|
||||
"os.environ",
|
||||
{_HANDLER: "mocks.lambda_function.handler"},
|
||||
{_HANDLER: "tests.mocks.lambda_function.handler"},
|
||||
)
|
||||
self.common_env_patch.start()
|
||||
|
||||
|
|
@ -356,7 +358,7 @@ class TestAwsLambdaInstrumentor(TestBase):
|
|||
def test_api_gateway_proxy_event_sets_attributes(self):
|
||||
handler_patch = mock.patch.dict(
|
||||
"os.environ",
|
||||
{_HANDLER: "mocks.lambda_function.rest_api_handler"},
|
||||
{_HANDLER: "tests.mocks.lambda_function.rest_api_handler"},
|
||||
)
|
||||
handler_patch.start()
|
||||
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ from opentelemetry.semconv.trace import DbSystemValues, SpanAttributes
|
|||
from opentelemetry.trace.span import Span
|
||||
from opentelemetry.util.types import AttributeValue
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
_AttributePathT = Union[str, Tuple[str]]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -99,13 +99,13 @@ class _OpInvoke(_LambdaOperation):
|
|||
# Lambda extension
|
||||
################################################################################
|
||||
|
||||
_OPERATION_MAPPING = {
|
||||
_OPERATION_MAPPING: Dict[str, _LambdaOperation] = {
|
||||
op.operation_name(): op
|
||||
for op in globals().values()
|
||||
if inspect.isclass(op)
|
||||
and issubclass(op, _LambdaOperation)
|
||||
and not inspect.isabstract(op)
|
||||
} # type: Dict[str, _LambdaOperation]
|
||||
}
|
||||
|
||||
|
||||
class _LambdaExtension(_AwsSdkExtension):
|
||||
|
|
|
|||
|
|
@ -82,7 +82,9 @@ class _OpPublish(_SnsOperation):
|
|||
attributes[SpanAttributes.MESSAGING_DESTINATION] = destination_name
|
||||
|
||||
# TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when opentelemetry-semantic-conventions 0.42b0 is released
|
||||
attributes["messaging.destination.name"] = cls._extract_input_arn(call_context)
|
||||
attributes["messaging.destination.name"] = cls._extract_input_arn(
|
||||
call_context
|
||||
)
|
||||
call_context.span_name = (
|
||||
f"{'phone_number' if is_phone_number else destination_name} send"
|
||||
)
|
||||
|
|
@ -141,13 +143,13 @@ class _OpPublishBatch(_OpPublish):
|
|||
# SNS extension
|
||||
################################################################################
|
||||
|
||||
_OPERATION_MAPPING = {
|
||||
_OPERATION_MAPPING: Dict[str, _SnsOperation] = {
|
||||
op.operation_name(): op
|
||||
for op in globals().values()
|
||||
if inspect.isclass(op)
|
||||
and issubclass(op, _SnsOperation)
|
||||
and not inspect.isabstract(op)
|
||||
} # type: Dict[str, _SnsOperation]
|
||||
}
|
||||
|
||||
|
||||
class _SnsExtension(_AwsSdkExtension):
|
||||
|
|
|
|||
|
|
@ -57,23 +57,21 @@ class _AwsSdkCallContext:
|
|||
boto_meta = client.meta
|
||||
service_model = boto_meta.service_model
|
||||
|
||||
self.service = service_model.service_name.lower() # type: str
|
||||
self.operation = operation # type: str
|
||||
self.params = params # type: Dict[str, Any]
|
||||
self.service = service_model.service_name.lower()
|
||||
self.operation = operation
|
||||
self.params = params
|
||||
|
||||
# 'operation' and 'service' are essential for instrumentation.
|
||||
# for all other attributes we extract them defensively. All of them should
|
||||
# usually exist unless some future botocore version moved things.
|
||||
self.region = self._get_attr(
|
||||
boto_meta, "region_name"
|
||||
) # type: Optional[str]
|
||||
self.endpoint_url = self._get_attr(
|
||||
self.region: Optional[str] = self._get_attr(boto_meta, "region_name")
|
||||
self.endpoint_url: Optional[str] = self._get_attr(
|
||||
boto_meta, "endpoint_url"
|
||||
) # type: Optional[str]
|
||||
)
|
||||
|
||||
self.api_version = self._get_attr(
|
||||
self.api_version: Optional[str] = self._get_attr(
|
||||
service_model, "api_version"
|
||||
) # type: Optional[str]
|
||||
)
|
||||
# name of the service in proper casing
|
||||
self.service_id = str(
|
||||
self._get_attr(service_model, "service_id", self.service)
|
||||
|
|
|
|||
|
|
@ -122,7 +122,7 @@ class TestSnsExtension(TestBase):
|
|||
target_arn,
|
||||
# TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when
|
||||
# opentelemetry-semantic-conventions 0.42b0 is released
|
||||
span.attributes["messaging.destination.name"]
|
||||
span.attributes["messaging.destination.name"],
|
||||
)
|
||||
|
||||
@mock_sns
|
||||
|
|
@ -194,7 +194,7 @@ class TestSnsExtension(TestBase):
|
|||
topic_arn,
|
||||
# TODO: Use SpanAttributes.MESSAGING_DESTINATION_NAME when
|
||||
# opentelemetry-semantic-conventions 0.42b0 is released
|
||||
span.attributes["messaging.destination.name"]
|
||||
span.attributes["messaging.destination.name"],
|
||||
)
|
||||
|
||||
self.assert_injected_span(message1_attrs, span)
|
||||
|
|
|
|||
|
|
@ -43,9 +43,9 @@ import cassandra.cluster
|
|||
from wrapt import wrap_function_wrapper
|
||||
|
||||
from opentelemetry import trace
|
||||
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
|
||||
from opentelemetry.instrumentation.cassandra.package import _instruments
|
||||
from opentelemetry.instrumentation.cassandra.version import __version__
|
||||
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
|
||||
from opentelemetry.instrumentation.utils import unwrap
|
||||
from opentelemetry.semconv.trace import SpanAttributes
|
||||
|
||||
|
|
|
|||
|
|
@ -63,6 +63,7 @@ import logging
|
|||
from timeit import default_timer
|
||||
from typing import Collection, Iterable
|
||||
|
||||
from billiard import VERSION
|
||||
from billiard.einfo import ExceptionInfo
|
||||
from celery import signals # pylint: disable=no-name-in-module
|
||||
|
||||
|
|
@ -76,8 +77,6 @@ from opentelemetry.propagate import extract, inject
|
|||
from opentelemetry.propagators.textmap import Getter
|
||||
from opentelemetry.semconv.trace import SpanAttributes
|
||||
from opentelemetry.trace.status import Status, StatusCode
|
||||
from billiard import VERSION
|
||||
|
||||
|
||||
if VERSION >= (4, 0, 1):
|
||||
from billiard.einfo import ExceptionWithTraceback
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@
|
|||
import logging
|
||||
|
||||
from celery import registry # pylint: disable=no-name-in-module
|
||||
from billiard import VERSION
|
||||
|
||||
from opentelemetry.semconv.trace import SpanAttributes
|
||||
|
||||
|
|
|
|||
|
|
@ -107,16 +107,15 @@ from opentelemetry import context, propagate, trace
|
|||
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
|
||||
from opentelemetry.instrumentation.utils import unwrap
|
||||
from opentelemetry.semconv.trace import MessagingOperationValues
|
||||
from opentelemetry.trace import Link, SpanKind, Tracer
|
||||
from opentelemetry.trace import Tracer
|
||||
|
||||
from .package import _instruments
|
||||
from .utils import (
|
||||
KafkaPropertiesExtractor,
|
||||
_end_current_consume_span,
|
||||
_create_new_consume_span,
|
||||
_end_current_consume_span,
|
||||
_enrich_span,
|
||||
_get_span_name,
|
||||
_kafka_getter,
|
||||
_kafka_setter,
|
||||
)
|
||||
from .version import __version__
|
||||
|
|
|
|||
|
|
@ -2,13 +2,13 @@ from logging import getLogger
|
|||
from typing import List, Optional
|
||||
|
||||
from opentelemetry import context, propagate
|
||||
from opentelemetry.trace import SpanKind, Link
|
||||
from opentelemetry.propagators import textmap
|
||||
from opentelemetry.semconv.trace import (
|
||||
MessagingDestinationKindValues,
|
||||
MessagingOperationValues,
|
||||
SpanAttributes,
|
||||
)
|
||||
from opentelemetry.trace import Link, SpanKind
|
||||
|
||||
_LOG = getLogger(__name__)
|
||||
|
||||
|
|
|
|||
|
|
@ -14,13 +14,6 @@
|
|||
|
||||
# pylint: disable=no-name-in-module
|
||||
|
||||
from opentelemetry.semconv.trace import (
|
||||
SpanAttributes,
|
||||
MessagingDestinationKindValues,
|
||||
)
|
||||
from opentelemetry.test.test_base import TestBase
|
||||
from .utils import MockConsumer, MockedMessage
|
||||
|
||||
from confluent_kafka import Consumer, Producer
|
||||
|
||||
from opentelemetry.instrumentation.confluent_kafka import (
|
||||
|
|
@ -32,6 +25,13 @@ from opentelemetry.instrumentation.confluent_kafka.utils import (
|
|||
KafkaContextGetter,
|
||||
KafkaContextSetter,
|
||||
)
|
||||
from opentelemetry.semconv.trace import (
|
||||
MessagingDestinationKindValues,
|
||||
SpanAttributes,
|
||||
)
|
||||
from opentelemetry.test.test_base import TestBase
|
||||
|
||||
from .utils import MockConsumer, MockedMessage
|
||||
|
||||
|
||||
class TestConfluentKafka(TestBase):
|
||||
|
|
|
|||
|
|
@ -427,14 +427,14 @@ class CursorTracer:
|
|||
if args and self._commenter_enabled:
|
||||
try:
|
||||
args_list = list(args)
|
||||
commenter_data = dict(
|
||||
commenter_data = {
|
||||
# Psycopg2/framework information
|
||||
db_driver=f"psycopg2:{self._connect_module.__version__.split(' ')[0]}",
|
||||
dbapi_threadsafety=self._connect_module.threadsafety,
|
||||
dbapi_level=self._connect_module.apilevel,
|
||||
libpq_version=self._connect_module.__libpq_version__,
|
||||
driver_paramstyle=self._connect_module.paramstyle,
|
||||
)
|
||||
"db_driver": f"psycopg2:{self._connect_module.__version__.split(' ')[0]}",
|
||||
"dbapi_threadsafety": self._connect_module.threadsafety,
|
||||
"dbapi_level": self._connect_module.apilevel,
|
||||
"libpq_version": self._connect_module.__libpq_version__,
|
||||
"driver_paramstyle": self._connect_module.paramstyle,
|
||||
}
|
||||
if self._commenter_options.get(
|
||||
"opentelemetry_values", True
|
||||
):
|
||||
|
|
|
|||
|
|
@ -172,6 +172,7 @@ class ElasticsearchInstrumentor(BaseInstrumentor):
|
|||
)
|
||||
|
||||
def _uninstrument(self, **kwargs):
|
||||
# pylint: disable=no-member
|
||||
unwrap(elasticsearch.Transport, "perform_request")
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@
|
|||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: disable=unexpected-keyword-arg,missing-kwoa,no-value-for-parameter
|
||||
|
||||
import json
|
||||
import os
|
||||
|
|
|
|||
|
|
@ -40,7 +40,6 @@ from opentelemetry.util.http import (
|
|||
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS,
|
||||
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST,
|
||||
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE,
|
||||
OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE,
|
||||
OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS,
|
||||
get_excluded_urls,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ class OpenTelemetryAioServerInterceptor(
|
|||
# we handle in our context wrapper.
|
||||
# Here, we're interested in uncaught exceptions.
|
||||
# pylint:disable=unidiomatic-typecheck
|
||||
if type(error) != Exception:
|
||||
if type(error) != Exception: # noqa: E721
|
||||
span.record_exception(error)
|
||||
raise error
|
||||
|
||||
|
|
@ -101,7 +101,7 @@ class OpenTelemetryAioServerInterceptor(
|
|||
|
||||
except Exception as error:
|
||||
# pylint:disable=unidiomatic-typecheck
|
||||
if type(error) != Exception:
|
||||
if type(error) != Exception: # noqa: E721
|
||||
span.record_exception(error)
|
||||
raise error
|
||||
|
||||
|
|
|
|||
|
|
@ -315,7 +315,7 @@ class OpenTelemetryServerInterceptor(grpc.ServerInterceptor):
|
|||
# we handle in our context wrapper.
|
||||
# Here, we're interested in uncaught exceptions.
|
||||
# pylint:disable=unidiomatic-typecheck
|
||||
if type(error) != Exception:
|
||||
if type(error) != Exception: # noqa: E721
|
||||
span.record_exception(error)
|
||||
raise error
|
||||
|
||||
|
|
@ -342,6 +342,6 @@ class OpenTelemetryServerInterceptor(grpc.ServerInterceptor):
|
|||
|
||||
except Exception as error:
|
||||
# pylint:disable=unidiomatic-typecheck
|
||||
if type(error) != Exception:
|
||||
if type(error) != Exception: # noqa: E721
|
||||
span.record_exception(error)
|
||||
raise error
|
||||
|
|
|
|||
|
|
@ -17,13 +17,14 @@ from typing import Callable, TypeVar
|
|||
|
||||
import grpc
|
||||
|
||||
TCallDetails = TypeVar(
|
||||
"TCallDetails",
|
||||
CallDetailsT = TypeVar(
|
||||
"CallDetailsT",
|
||||
grpc.HandlerCallDetails,
|
||||
grpc.ClientCallDetails,
|
||||
grpc.aio.ClientCallDetails,
|
||||
)
|
||||
Condition = Callable[[TCallDetails], bool]
|
||||
# pylint: disable=invalid-name
|
||||
Condition = Callable[[CallDetailsT], bool]
|
||||
|
||||
|
||||
def _full_method(metadata):
|
||||
|
|
@ -61,7 +62,7 @@ def _split_full_method(metadata):
|
|||
return (service, method)
|
||||
|
||||
|
||||
def all_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
||||
def all_of(*args: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that returns True if all filter functions
|
||||
assigned matches conditions.
|
||||
|
||||
|
|
@ -79,7 +80,7 @@ def all_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def any_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
||||
def any_of(*args: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that returns True if any of filter functions
|
||||
assigned matches conditions.
|
||||
|
||||
|
|
@ -97,7 +98,7 @@ def any_of(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def negate(func: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
||||
def negate(func: Condition[CallDetailsT]) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that negate the result of func
|
||||
|
||||
Args:
|
||||
|
|
@ -113,7 +114,7 @@ def negate(func: Condition[TCallDetails]) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def method_name(name: str) -> Condition[TCallDetails]:
|
||||
def method_name(name: str) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that return True if
|
||||
request's gRPC method name matches name.
|
||||
|
||||
|
|
@ -132,7 +133,7 @@ def method_name(name: str) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def method_prefix(prefix: str) -> Condition[TCallDetails]:
|
||||
def method_prefix(prefix: str) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that return True if
|
||||
request's gRPC method name starts with prefix.
|
||||
|
||||
|
|
@ -151,7 +152,7 @@ def method_prefix(prefix: str) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def full_method_name(name: str) -> Condition[TCallDetails]:
|
||||
def full_method_name(name: str) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that return True if
|
||||
request's gRPC full method name matches name.
|
||||
|
||||
|
|
@ -170,7 +171,7 @@ def full_method_name(name: str) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def service_name(name: str) -> Condition[TCallDetails]:
|
||||
def service_name(name: str) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that return True if
|
||||
request's gRPC service name matches name.
|
||||
|
||||
|
|
@ -189,7 +190,7 @@ def service_name(name: str) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def service_prefix(prefix: str) -> Condition[TCallDetails]:
|
||||
def service_prefix(prefix: str) -> Condition[CallDetailsT]:
|
||||
"""Returns a filter function that return True if
|
||||
request's gRPC service name starts with prefix.
|
||||
|
||||
|
|
@ -208,7 +209,7 @@ def service_prefix(prefix: str) -> Condition[TCallDetails]:
|
|||
return filter_fn
|
||||
|
||||
|
||||
def health_check() -> Condition[TCallDetails]:
|
||||
def health_check() -> Condition[CallDetailsT]:
|
||||
"""Returns a Filter that returns true if the request's
|
||||
service name is health check defined by gRPC Health Checking Protocol.
|
||||
https://github.com/grpc/grpc/blob/master/doc/health-checking.md
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@
|
|||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint:disable=cyclic-import
|
||||
|
||||
import grpc
|
||||
from tests.protobuf import ( # pylint: disable=no-name-in-module
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@
|
|||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint:disable=cyclic-import
|
||||
|
||||
import os
|
||||
from unittest import mock
|
||||
|
|
|
|||
|
|
@ -1,3 +1,18 @@
|
|||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: disable=unnecessary-dunder-call
|
||||
|
||||
from unittest import TestCase, mock
|
||||
|
||||
from opentelemetry.instrumentation.kafka.utils import (
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@
|
|||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: disable=unnecessary-dunder-call
|
||||
|
||||
from logging import getLogger
|
||||
from typing import Any, Collection, Dict, Optional
|
||||
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ class MockConnection:
|
|||
return MockCursor()
|
||||
|
||||
def get_dsn_parameters(self): # pylint: disable=no-self-use
|
||||
return dict(dbname="test")
|
||||
return {"dbname": "test"}
|
||||
|
||||
|
||||
class TestPostgresqlIntegration(TestBase):
|
||||
|
|
|
|||
|
|
@ -169,7 +169,7 @@ def _get_address_attributes(instance):
|
|||
address_attributes[SpanAttributes.NET_PEER_NAME] = instance.server
|
||||
address_attributes[
|
||||
SpanAttributes.NET_TRANSPORT
|
||||
] = NetTransportValues.UNIX.value
|
||||
] = NetTransportValues.OTHER.value
|
||||
|
||||
return address_attributes
|
||||
|
||||
|
|
|
|||
|
|
@ -24,14 +24,15 @@ from pymemcache.exceptions import (
|
|||
MemcacheUnknownError,
|
||||
)
|
||||
|
||||
# pylint: disable=import-error,no-name-in-module
|
||||
from tests.utils import MockSocket, _str
|
||||
|
||||
from opentelemetry import trace as trace_api
|
||||
from opentelemetry.instrumentation.pymemcache import PymemcacheInstrumentor
|
||||
from opentelemetry.semconv.trace import SpanAttributes
|
||||
from opentelemetry.test.test_base import TestBase
|
||||
from opentelemetry.trace import get_tracer
|
||||
|
||||
from .utils import MockSocket, _str
|
||||
|
||||
TEST_HOST = "localhost"
|
||||
TEST_PORT = 117711
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ def _extract_conn_attributes(conn_kwargs):
|
|||
attributes[SpanAttributes.NET_PEER_NAME] = conn_kwargs.get("path", "")
|
||||
attributes[
|
||||
SpanAttributes.NET_TRANSPORT
|
||||
] = NetTransportValues.UNIX.value
|
||||
] = NetTransportValues.OTHER.value
|
||||
|
||||
return attributes
|
||||
|
||||
|
|
|
|||
|
|
@ -456,7 +456,7 @@ class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
|
|||
@staticmethod
|
||||
def perform_request(url: str, session: requests.Session = None):
|
||||
if session is None:
|
||||
return requests.get(url)
|
||||
return requests.get(url, timeout=5)
|
||||
return session.get(url)
|
||||
|
||||
def test_credential_removal(self):
|
||||
|
|
@ -467,7 +467,7 @@ class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
|
|||
self.assertEqual(span.attributes[SpanAttributes.HTTP_URL], self.URL)
|
||||
|
||||
def test_if_headers_equals_none(self):
|
||||
result = requests.get(self.URL, headers=None)
|
||||
result = requests.get(self.URL, headers=None, timeout=5)
|
||||
self.assertEqual(result.text, "Hello!")
|
||||
self.assert_span()
|
||||
|
||||
|
|
@ -501,7 +501,7 @@ class TestRequestsIntergrationMetric(TestBase):
|
|||
|
||||
@staticmethod
|
||||
def perform_request(url: str) -> requests.Response:
|
||||
return requests.get(url)
|
||||
return requests.get(url, timeout=5)
|
||||
|
||||
def test_basic_metric_success(self):
|
||||
self.perform_request(self.URL)
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ class TestURLLib3InstrumentorWithRealSocket(HttpTestBase, TestBase):
|
|||
|
||||
@staticmethod
|
||||
def perform_request(url: str) -> requests.Response:
|
||||
return requests.get(url)
|
||||
return requests.get(url, timeout=5)
|
||||
|
||||
def test_basic_http_success(self):
|
||||
response = self.perform_request(self.http_url)
|
||||
|
|
|
|||
|
|
@ -82,6 +82,8 @@ from sklearn.tree import BaseDecisionTree
|
|||
from sklearn.utils.metaestimators import _IffHasAttrDescriptor
|
||||
|
||||
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
|
||||
|
||||
# pylint: disable=no-name-in-module
|
||||
from opentelemetry.instrumentation.sklearn.package import _instruments
|
||||
from opentelemetry.instrumentation.sklearn.version import __version__
|
||||
from opentelemetry.trace import get_tracer
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@
|
|||
|
||||
from sklearn.ensemble import RandomForestClassifier
|
||||
|
||||
# pylint: disable=no-name-in-module
|
||||
from opentelemetry.instrumentation.sklearn import (
|
||||
DEFAULT_EXCLUDE_CLASSES,
|
||||
DEFAULT_METHODS,
|
||||
|
|
|
|||
|
|
@ -224,11 +224,11 @@ class EngineTracer:
|
|||
for key, value in attrs.items():
|
||||
span.set_attribute(key, value)
|
||||
if self.enable_commenter:
|
||||
commenter_data = dict(
|
||||
db_driver=conn.engine.driver,
|
||||
commenter_data = {
|
||||
"db_driver": conn.engine.driver,
|
||||
# Driver/framework centric information.
|
||||
db_framework=f"sqlalchemy:{__version__}",
|
||||
)
|
||||
"db_framework": f"sqlalchemy:{__version__}",
|
||||
}
|
||||
|
||||
if self.commenter_options.get("opentelemetry_values", True):
|
||||
commenter_data.update(**_get_opentelemetry_values())
|
||||
|
|
@ -296,7 +296,9 @@ def _get_attributes_from_cursor(vendor, cursor, attrs):
|
|||
is_unix_socket = info.host and info.host.startswith("/")
|
||||
|
||||
if is_unix_socket:
|
||||
attrs[SpanAttributes.NET_TRANSPORT] = NetTransportValues.UNIX.value
|
||||
attrs[
|
||||
SpanAttributes.NET_TRANSPORT
|
||||
] = NetTransportValues.OTHER.value
|
||||
if info.port:
|
||||
# postgresql enforces this pattern on all socket names
|
||||
attrs[SpanAttributes.NET_PEER_NAME] = os.path.join(
|
||||
|
|
|
|||
|
|
@ -76,9 +76,9 @@ API
|
|||
"""
|
||||
|
||||
import gc
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import threading
|
||||
from platform import python_implementation
|
||||
from typing import Collection, Dict, Iterable, List, Optional
|
||||
|
|
@ -363,7 +363,7 @@ class SystemMetricsInstrumentor(BaseInstrumentor):
|
|||
if "process.runtime.gc_count" in self._config:
|
||||
if self._python_implementation == "pypy":
|
||||
_logger.warning(
|
||||
"The process.runtime.gc_count metric won't be collected because the interpreter is PyPy"
|
||||
"The process.runtime.gc_count metric won't be collected because the interpreter is PyPy"
|
||||
)
|
||||
else:
|
||||
self._meter.create_observable_counter(
|
||||
|
|
@ -372,7 +372,6 @@ class SystemMetricsInstrumentor(BaseInstrumentor):
|
|||
description=f"Runtime {self._python_implementation} GC count",
|
||||
unit="bytes",
|
||||
)
|
||||
|
||||
|
||||
if "process.runtime.thread_count" in self._config:
|
||||
self._meter.create_observable_up_down_counter(
|
||||
|
|
|
|||
|
|
@ -18,13 +18,12 @@ from collections import namedtuple
|
|||
from platform import python_implementation
|
||||
from unittest import mock, skipIf
|
||||
|
||||
from opentelemetry.sdk.metrics import MeterProvider
|
||||
from opentelemetry.sdk.metrics.export import InMemoryMetricReader
|
||||
from opentelemetry.test.test_base import TestBase
|
||||
|
||||
from opentelemetry.instrumentation.system_metrics import (
|
||||
SystemMetricsInstrumentor,
|
||||
)
|
||||
from opentelemetry.sdk.metrics import MeterProvider
|
||||
from opentelemetry.sdk.metrics.export import InMemoryMetricReader
|
||||
from opentelemetry.test.test_base import TestBase
|
||||
|
||||
|
||||
def _mock_netconnection():
|
||||
|
|
@ -120,12 +119,14 @@ class TestSystemMetrics(TestBase):
|
|||
f"process.runtime.{self.implementation}.context_switches",
|
||||
f"process.runtime.{self.implementation}.cpu.utilization",
|
||||
]
|
||||
|
||||
|
||||
if self.implementation == "pypy":
|
||||
self.assertEqual(len(metric_names), 20)
|
||||
else:
|
||||
self.assertEqual(len(metric_names), 21)
|
||||
observer_names.append(f"process.runtime.{self.implementation}.gc_count",)
|
||||
observer_names.append(
|
||||
f"process.runtime.{self.implementation}.gc_count",
|
||||
)
|
||||
|
||||
for observer in metric_names:
|
||||
self.assertIn(observer, observer_names)
|
||||
|
|
@ -139,7 +140,7 @@ class TestSystemMetrics(TestBase):
|
|||
"process.runtime.cpu.utilization": None,
|
||||
"process.runtime.context_switches": ["involuntary", "voluntary"],
|
||||
}
|
||||
|
||||
|
||||
if self.implementation != "pypy":
|
||||
runtime_config["process.runtime.gc_count"] = None
|
||||
|
||||
|
|
@ -166,7 +167,9 @@ class TestSystemMetrics(TestBase):
|
|||
self.assertEqual(len(metric_names), 5)
|
||||
else:
|
||||
self.assertEqual(len(metric_names), 6)
|
||||
observer_names.append(f"process.runtime.{self.implementation}.gc_count")
|
||||
observer_names.append(
|
||||
f"process.runtime.{self.implementation}.gc_count"
|
||||
)
|
||||
|
||||
for observer in metric_names:
|
||||
self.assertIn(observer, observer_names)
|
||||
|
|
@ -181,9 +184,9 @@ class TestSystemMetrics(TestBase):
|
|||
for data_point in metric.data.data_points:
|
||||
for expect in expected:
|
||||
if (
|
||||
dict(data_point.attributes)
|
||||
== expect.attributes
|
||||
and metric.name == observer_name
|
||||
dict(data_point.attributes)
|
||||
== expect.attributes
|
||||
and metric.name == observer_name
|
||||
):
|
||||
self.assertEqual(
|
||||
data_point.value,
|
||||
|
|
@ -791,7 +794,9 @@ class TestSystemMetrics(TestBase):
|
|||
)
|
||||
|
||||
@mock.patch("gc.get_count")
|
||||
@skipIf(python_implementation().lower() == "pypy", "not supported for pypy")
|
||||
@skipIf(
|
||||
python_implementation().lower() == "pypy", "not supported for pypy"
|
||||
)
|
||||
def test_runtime_get_count(self, mock_gc_get_count):
|
||||
mock_gc_get_count.configure_mock(**{"return_value": (1, 2, 3)})
|
||||
|
||||
|
|
|
|||
|
|
@ -239,8 +239,8 @@ def _instrument(
|
|||
token = context.attach(
|
||||
context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)
|
||||
)
|
||||
start_time = default_timer()
|
||||
try:
|
||||
start_time = default_timer()
|
||||
result = call_wrapped() # *** PROCEED
|
||||
except Exception as exc: # pylint: disable=W0703
|
||||
exception = exc
|
||||
|
|
|
|||
|
|
@ -13,14 +13,14 @@
|
|||
# limitations under the License.
|
||||
|
||||
|
||||
from platform import python_implementation
|
||||
from sys import version_info
|
||||
from timeit import default_timer
|
||||
from urllib import request
|
||||
from urllib.parse import urlencode
|
||||
from pytest import mark
|
||||
from platform import python_implementation
|
||||
from sys import version_info
|
||||
|
||||
import httpretty
|
||||
from pytest import mark
|
||||
|
||||
from opentelemetry.instrumentation.urllib import ( # pylint: disable=no-name-in-module,import-error
|
||||
URLLibInstrumentor,
|
||||
|
|
@ -190,22 +190,16 @@ class TestUrllibMetricsInstrumentation(TestBase):
|
|||
|
||||
@mark.skipif(
|
||||
python_implementation() == "PyPy" or version_info.minor == 7,
|
||||
reason="Fails randomly in 3.7 and pypy"
|
||||
reason="Fails randomly in 3.7 and pypy",
|
||||
)
|
||||
def test_metric_uninstrument(self):
|
||||
with request.urlopen(self.URL):
|
||||
metrics = self.get_sorted_metrics()
|
||||
self.assertEqual(len(metrics), 3)
|
||||
|
||||
self.assertEqual(
|
||||
metrics[0].data.data_points[0].sum, 1
|
||||
)
|
||||
self.assertEqual(
|
||||
metrics[1].data.data_points[0].sum, 0
|
||||
)
|
||||
self.assertEqual(
|
||||
metrics[2].data.data_points[0].sum, 6
|
||||
)
|
||||
self.assertEqual(metrics[0].data.data_points[0].sum, 1)
|
||||
self.assertEqual(metrics[1].data.data_points[0].sum, 0)
|
||||
self.assertEqual(metrics[2].data.data_points[0].sum, 6)
|
||||
|
||||
URLLibInstrumentor().uninstrument()
|
||||
with request.urlopen(self.URL):
|
||||
|
|
|
|||
|
|
@ -78,6 +78,7 @@ def create_gen_wsgi(response):
|
|||
|
||||
def error_wsgi(environ, start_response):
|
||||
assert isinstance(environ, dict)
|
||||
exc_info = None
|
||||
try:
|
||||
raise ValueError
|
||||
except ValueError:
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ def _pip_check():
|
|||
) as check_pipe:
|
||||
pip_check = check_pipe.communicate()[0].decode()
|
||||
pip_check_lower = pip_check.lower()
|
||||
for package_tup in libraries.values():
|
||||
for package_tup in libraries:
|
||||
for package in package_tup:
|
||||
if package.lower() in pip_check_lower:
|
||||
raise RuntimeError(f"Dependency conflict found: {pip_check}")
|
||||
|
|
@ -102,15 +102,12 @@ def _is_installed(req):
|
|||
|
||||
|
||||
def _find_installed_libraries():
|
||||
libs = default_instrumentations[:]
|
||||
libs.extend(
|
||||
[
|
||||
v["instrumentation"]
|
||||
for _, v in libraries.items()
|
||||
if _is_installed(v["library"])
|
||||
]
|
||||
)
|
||||
return libs
|
||||
for lib in default_instrumentations:
|
||||
yield lib
|
||||
|
||||
for lib in libraries:
|
||||
if _is_installed(lib["library"]):
|
||||
yield lib["instrumentation"]
|
||||
|
||||
|
||||
def _run_requirements():
|
||||
|
|
|
|||
|
|
@ -15,176 +15,176 @@
|
|||
# DO NOT EDIT. THIS FILE WAS AUTOGENERATED FROM INSTRUMENTATION PACKAGES.
|
||||
# RUN `python scripts/generate_instrumentation_bootstrap.py` TO REGENERATE.
|
||||
|
||||
libraries = {
|
||||
"aio_pika": {
|
||||
libraries = [
|
||||
{
|
||||
"library": "aio_pika >= 7.2.0, < 10.0.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-aio-pika==0.43b0.dev",
|
||||
},
|
||||
"aiohttp": {
|
||||
{
|
||||
"library": "aiohttp ~= 3.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-aiohttp-client==0.43b0.dev",
|
||||
},
|
||||
"aiohttp": {
|
||||
{
|
||||
"library": "aiohttp ~= 3.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-aiohttp-server==0.43b0.dev",
|
||||
},
|
||||
"aiopg": {
|
||||
{
|
||||
"library": "aiopg >= 0.13.0, < 2.0.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-aiopg==0.43b0.dev",
|
||||
},
|
||||
"asgiref": {
|
||||
{
|
||||
"library": "asgiref ~= 3.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-asgi==0.43b0.dev",
|
||||
},
|
||||
"asyncpg": {
|
||||
{
|
||||
"library": "asyncpg >= 0.12.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-asyncpg==0.43b0.dev",
|
||||
},
|
||||
"boto": {
|
||||
{
|
||||
"library": "boto~=2.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-boto==0.43b0.dev",
|
||||
},
|
||||
"boto3": {
|
||||
{
|
||||
"library": "boto3 ~= 1.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-boto3sqs==0.43b0.dev",
|
||||
},
|
||||
"botocore": {
|
||||
{
|
||||
"library": "botocore ~= 1.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-botocore==0.43b0.dev",
|
||||
},
|
||||
"cassandra-driver": {
|
||||
{
|
||||
"library": "cassandra-driver ~= 3.25",
|
||||
"instrumentation": "opentelemetry-instrumentation-cassandra==0.43b0.dev",
|
||||
},
|
||||
"scylla-driver": {
|
||||
{
|
||||
"library": "scylla-driver ~= 3.25",
|
||||
"instrumentation": "opentelemetry-instrumentation-cassandra==0.43b0.dev",
|
||||
},
|
||||
"celery": {
|
||||
{
|
||||
"library": "celery >= 4.0, < 6.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-celery==0.43b0.dev",
|
||||
},
|
||||
"confluent-kafka": {
|
||||
{
|
||||
"library": "confluent-kafka >= 1.8.2, <= 2.2.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-confluent-kafka==0.43b0.dev",
|
||||
},
|
||||
"django": {
|
||||
{
|
||||
"library": "django >= 1.10",
|
||||
"instrumentation": "opentelemetry-instrumentation-django==0.43b0.dev",
|
||||
},
|
||||
"elasticsearch": {
|
||||
{
|
||||
"library": "elasticsearch >= 2.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-elasticsearch==0.43b0.dev",
|
||||
},
|
||||
"falcon": {
|
||||
{
|
||||
"library": "falcon >= 1.4.1, < 4.0.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-falcon==0.43b0.dev",
|
||||
},
|
||||
"fastapi": {
|
||||
{
|
||||
"library": "fastapi ~= 0.58",
|
||||
"instrumentation": "opentelemetry-instrumentation-fastapi==0.43b0.dev",
|
||||
},
|
||||
"flask": {
|
||||
{
|
||||
"library": "flask >= 1.0, < 3.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-flask==0.43b0.dev",
|
||||
},
|
||||
"werkzeug": {
|
||||
{
|
||||
"library": "werkzeug < 3.0.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-flask==0.43b0.dev",
|
||||
},
|
||||
"grpcio": {
|
||||
{
|
||||
"library": "grpcio ~= 1.27",
|
||||
"instrumentation": "opentelemetry-instrumentation-grpc==0.43b0.dev",
|
||||
},
|
||||
"httpx": {
|
||||
{
|
||||
"library": "httpx >= 0.18.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-httpx==0.43b0.dev",
|
||||
},
|
||||
"jinja2": {
|
||||
{
|
||||
"library": "jinja2 >= 2.7, < 4.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-jinja2==0.43b0.dev",
|
||||
},
|
||||
"kafka-python": {
|
||||
{
|
||||
"library": "kafka-python >= 2.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-kafka-python==0.43b0.dev",
|
||||
},
|
||||
"mysql-connector-python": {
|
||||
{
|
||||
"library": "mysql-connector-python ~= 8.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-mysql==0.43b0.dev",
|
||||
},
|
||||
"mysqlclient": {
|
||||
{
|
||||
"library": "mysqlclient < 3",
|
||||
"instrumentation": "opentelemetry-instrumentation-mysqlclient==0.43b0.dev",
|
||||
},
|
||||
"pika": {
|
||||
{
|
||||
"library": "pika >= 0.12.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-pika==0.43b0.dev",
|
||||
},
|
||||
"psycopg2": {
|
||||
{
|
||||
"library": "psycopg2 >= 2.7.3.1",
|
||||
"instrumentation": "opentelemetry-instrumentation-psycopg2==0.43b0.dev",
|
||||
},
|
||||
"pymemcache": {
|
||||
{
|
||||
"library": "pymemcache >= 1.3.5, < 5",
|
||||
"instrumentation": "opentelemetry-instrumentation-pymemcache==0.43b0.dev",
|
||||
},
|
||||
"pymongo": {
|
||||
{
|
||||
"library": "pymongo >= 3.1, < 5.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-pymongo==0.43b0.dev",
|
||||
},
|
||||
"PyMySQL": {
|
||||
{
|
||||
"library": "PyMySQL < 2",
|
||||
"instrumentation": "opentelemetry-instrumentation-pymysql==0.43b0.dev",
|
||||
},
|
||||
"pyramid": {
|
||||
{
|
||||
"library": "pyramid >= 1.7",
|
||||
"instrumentation": "opentelemetry-instrumentation-pyramid==0.43b0.dev",
|
||||
},
|
||||
"redis": {
|
||||
{
|
||||
"library": "redis >= 2.6",
|
||||
"instrumentation": "opentelemetry-instrumentation-redis==0.43b0.dev",
|
||||
},
|
||||
"remoulade": {
|
||||
{
|
||||
"library": "remoulade >= 0.50",
|
||||
"instrumentation": "opentelemetry-instrumentation-remoulade==0.43b0.dev",
|
||||
},
|
||||
"requests": {
|
||||
{
|
||||
"library": "requests ~= 2.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-requests==0.43b0.dev",
|
||||
},
|
||||
"scikit-learn": {
|
||||
{
|
||||
"library": "scikit-learn ~= 0.24.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-sklearn==0.43b0.dev",
|
||||
},
|
||||
"sqlalchemy": {
|
||||
{
|
||||
"library": "sqlalchemy",
|
||||
"instrumentation": "opentelemetry-instrumentation-sqlalchemy==0.43b0.dev",
|
||||
},
|
||||
"starlette": {
|
||||
{
|
||||
"library": "starlette ~= 0.13.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-starlette==0.43b0.dev",
|
||||
},
|
||||
"psutil": {
|
||||
{
|
||||
"library": "psutil >= 5",
|
||||
"instrumentation": "opentelemetry-instrumentation-system-metrics==0.43b0.dev",
|
||||
},
|
||||
"tornado": {
|
||||
{
|
||||
"library": "tornado >= 5.1.1",
|
||||
"instrumentation": "opentelemetry-instrumentation-tornado==0.43b0.dev",
|
||||
},
|
||||
"tortoise-orm": {
|
||||
{
|
||||
"library": "tortoise-orm >= 0.17.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-tortoiseorm==0.43b0.dev",
|
||||
},
|
||||
"pydantic": {
|
||||
{
|
||||
"library": "pydantic >= 1.10.2",
|
||||
"instrumentation": "opentelemetry-instrumentation-tortoiseorm==0.43b0.dev",
|
||||
},
|
||||
"urllib3": {
|
||||
{
|
||||
"library": "urllib3 >= 1.0.0, < 3.0.0",
|
||||
"instrumentation": "opentelemetry-instrumentation-urllib3==0.43b0.dev",
|
||||
},
|
||||
}
|
||||
]
|
||||
default_instrumentations = [
|
||||
"opentelemetry-instrumentation-aws-lambda==0.43b0.dev",
|
||||
"opentelemetry-instrumentation-dbapi==0.43b0.dev",
|
||||
|
|
|
|||
|
|
@ -206,9 +206,10 @@ class _OpenTelemetrySemanticConventionStability:
|
|||
|
||||
@classmethod
|
||||
def _get_opentelemetry_stability_opt_in(
|
||||
type: _OpenTelemetryStabilitySignalType,
|
||||
cls,
|
||||
signal_type: _OpenTelemetryStabilitySignalType,
|
||||
) -> _OpenTelemetryStabilityMode:
|
||||
with _OpenTelemetrySemanticConventionStability._lock:
|
||||
return _OpenTelemetrySemanticConventionStability._OTEL_SEMCONV_STABILITY_SIGNAL_MAPPING.get(
|
||||
type, _OpenTelemetryStabilityMode.DEFAULT
|
||||
signal_type, _OpenTelemetryStabilityMode.DEFAULT
|
||||
)
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ class TestBootstrap(TestCase):
|
|||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.installed_libraries = sample_packages(
|
||||
[lib["instrumentation"] for lib in libraries.values()], 0.6
|
||||
[lib["instrumentation"] for lib in libraries], 0.6
|
||||
)
|
||||
|
||||
# treat 50% of sampled packages as pre-installed
|
||||
|
|
|
|||
|
|
@ -14,8 +14,12 @@
|
|||
|
||||
from os import environ
|
||||
|
||||
from opentelemetry.sdk.resources import ResourceDetector, Resource
|
||||
from opentelemetry.semconv.resource import ResourceAttributes, CloudPlatformValues, CloudProviderValues
|
||||
from opentelemetry.sdk.resources import Resource, ResourceDetector
|
||||
from opentelemetry.semconv.resource import (
|
||||
CloudPlatformValues,
|
||||
CloudProviderValues,
|
||||
ResourceAttributes,
|
||||
)
|
||||
|
||||
_AZURE_APP_SERVICE_STAMP_RESOURCE_ATTRIBUTE = "azure.app.service.stamp"
|
||||
_REGION_NAME = "REGION_NAME"
|
||||
|
|
@ -36,18 +40,25 @@ _APP_SERVICE_ATTRIBUTE_ENV_VARS = {
|
|||
_AZURE_APP_SERVICE_STAMP_RESOURCE_ATTRIBUTE: _WEBSITE_HOME_STAMPNAME,
|
||||
}
|
||||
|
||||
|
||||
class AzureAppServiceResourceDetector(ResourceDetector):
|
||||
def detect(self) -> Resource:
|
||||
attributes = {}
|
||||
website_site_name = environ.get(_WEBSITE_SITE_NAME)
|
||||
if website_site_name:
|
||||
attributes[ResourceAttributes.SERVICE_NAME] = website_site_name
|
||||
attributes[ResourceAttributes.CLOUD_PROVIDER] = CloudProviderValues.AZURE.value
|
||||
attributes[ResourceAttributes.CLOUD_PLATFORM] = CloudPlatformValues.AZURE_APP_SERVICE.value
|
||||
attributes[
|
||||
ResourceAttributes.CLOUD_PROVIDER
|
||||
] = CloudProviderValues.AZURE.value
|
||||
attributes[
|
||||
ResourceAttributes.CLOUD_PLATFORM
|
||||
] = CloudPlatformValues.AZURE_APP_SERVICE.value
|
||||
|
||||
azure_resource_uri = _get_azure_resource_uri(website_site_name)
|
||||
if azure_resource_uri:
|
||||
attributes[ResourceAttributes.CLOUD_RESOURCE_ID] = azure_resource_uri
|
||||
attributes[
|
||||
ResourceAttributes.CLOUD_RESOURCE_ID
|
||||
] = azure_resource_uri
|
||||
for (key, env_var) in _APP_SERVICE_ATTRIBUTE_ENV_VARS.items():
|
||||
value = environ.get(env_var)
|
||||
if value:
|
||||
|
|
@ -55,19 +66,16 @@ class AzureAppServiceResourceDetector(ResourceDetector):
        return Resource(attributes)


def _get_azure_resource_uri(website_site_name):
    website_resource_group = environ.get(_WEBSITE_RESOURCE_GROUP)
    website_owner_name = environ.get(_WEBSITE_OWNER_NAME)

    subscription_id = website_owner_name
    if website_owner_name and '+' in website_owner_name:
        subscription_id = website_owner_name[0:website_owner_name.index('+')]
    if website_owner_name and "+" in website_owner_name:
        subscription_id = website_owner_name[0 : website_owner_name.index("+")]

    if not (website_resource_group and subscription_id):
        return None

    return "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Web/sites/%s" % (
        subscription_id,
        website_resource_group,
        website_site_name,
    )
    return f"/subscriptions/{subscription_id}/resourceGroups/{website_resource_group}/providers/Microsoft.Web/sites/{website_site_name}"
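For reference, a rough usage sketch of the detector above with App Service style environment variables (the values are made up; the variable names follow the module's _WEBSITE_* constants, and the import path is the one used by the tests below):

import os
from unittest.mock import patch

from opentelemetry.resource.detector.azure.app_service import (
    AzureAppServiceResourceDetector,
)

fake_env = {
    "WEBSITE_SITE_NAME": "my-site",
    "WEBSITE_RESOURCE_GROUP": "my-rg",
    # the subscription id is taken from the part before "+"
    "WEBSITE_OWNER_NAME": "0000-sub-id+my-rg-region-webspace",
}

with patch.dict(os.environ, fake_env, clear=True):
    attributes = AzureAppServiceResourceDetector().detect().attributes
    # expected to include service.name, cloud.provider, cloud.platform and a
    # cloud.resource_id of the form /subscriptions/.../sites/my-site
    print(dict(attributes))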
@ -14,18 +14,16 @@
from json import loads
from logging import getLogger
from os import environ
from urllib.request import Request, urlopen
from urllib.error import URLError
from urllib.request import Request, urlopen

from opentelemetry.sdk.resources import ResourceDetector, Resource
from opentelemetry.sdk.resources import Resource, ResourceDetector
from opentelemetry.semconv.resource import (
    ResourceAttributes,
    CloudPlatformValues,
    CloudProviderValues,
    ResourceAttributes,
)


# TODO: Remove when cloud resource id is no longer missing in Resource Attributes
_AZURE_VM_METADATA_ENDPOINT = "http://169.254.169.254/metadata/instance/compute?api-version=2021-12-13&format=json"
_AZURE_VM_SCALE_SET_NAME_ATTRIBUTE = "azure.vm.scaleset.name"
@ -67,20 +65,22 @@ class AzureVMResourceDetector(ResourceDetector):
class _AzureVMMetadataServiceRequestor:
    def get_azure_vm_metadata(self):
    def get_azure_vm_metadata(self):  # pylint: disable=no-self-use
        request = Request(_AZURE_VM_METADATA_ENDPOINT)
        request.add_header("Metadata", "True")
        try:
            response = urlopen(request).read()
            return loads(response)
            with urlopen(request).read() as response:
                return loads(response)
        except URLError:
            # Not on Azure VM
            return None
        except Exception as e:
        except Exception as e:  # pylint: disable=broad-except,invalid-name
            _logger.exception("Failed to receive Azure VM metadata: %s", e)
            return None

    def get_attribute_from_metadata(self, metadata_json, attribute_key):
    def get_attribute_from_metadata(
        self, metadata_json, attribute_key
    ):  # pylint: disable=no-self-use
        ams_value = ""
        if attribute_key == _AZURE_VM_SCALE_SET_NAME_ATTRIBUTE:
            ams_value = metadata_json["vmScaleSetName"]
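One caveat on the new with urlopen(request).read() as response: form: .read() returns bytes, which is not a context manager, so the guarded call would raise before loads runs and be swallowed by the broad except. A sketch that keeps the intent of closing the connection (assuming no other behavior change is wanted):

# sketch only: enter the response object itself, then read and parse it
with urlopen(request) as response:
    return loads(response.read())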
@ -94,9 +94,9 @@ class _AzureVMMetadataServiceRequestor:
            ams_value = metadata_json["location"]
        elif attribute_key == ResourceAttributes.CLOUD_RESOURCE_ID:
            ams_value = metadata_json["resourceId"]
        elif (
            attribute_key == ResourceAttributes.HOST_ID
            or attribute_key == ResourceAttributes.SERVICE_INSTANCE_ID
        elif attribute_key in (
            ResourceAttributes.HOST_ID,
            ResourceAttributes.SERVICE_INSTANCE_ID,
        ):
            ams_value = metadata_json["vmId"]
        elif attribute_key == ResourceAttributes.HOST_NAME:
@ -14,6 +14,7 @@
import unittest
from unittest.mock import patch

# pylint: disable=no-name-in-module
from opentelemetry.resource.detector.azure.app_service import (
    AzureAppServiceResourceDetector,
)
@ -12,12 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from unittest.mock import patch, Mock
from unittest.mock import Mock, patch

from opentelemetry.semconv.resource import ResourceAttributes
from opentelemetry.resource.detector.azure.vm import (
    AzureVMResourceDetector,
)
# pylint: disable=no-name-in-module
from opentelemetry.resource.detector.azure.vm import AzureVMResourceDetector

LINUX_JSON = """
{
@ -369,10 +367,8 @@ class TestAzureVMResourceDetector(unittest.TestCase):
        mock_urlopen.return_value = mock_open
        mock_open.read.return_value = LINUX_JSON
        attributes = AzureVMResourceDetector().detect().attributes
        for attribute_key in LINUX_ATTRIBUTES:
            self.assertEqual(
                attributes[attribute_key], LINUX_ATTRIBUTES[attribute_key]
            )
        for attribute_key, attribute_value in LINUX_ATTRIBUTES.items():
            self.assertEqual(attributes[attribute_key], attribute_value)

    @patch("opentelemetry.resource.detector.azure.vm.urlopen")
    def test_windows(self, mock_urlopen):
@ -380,7 +376,5 @@ class TestAzureVMResourceDetector(unittest.TestCase):
        mock_urlopen.return_value = mock_open
        mock_open.read.return_value = WINDOWS_JSON
        attributes = AzureVMResourceDetector().detect().attributes
        for attribute_key in WINDOWS_ATTRIBUTES:
            self.assertEqual(
                attributes[attribute_key], WINDOWS_ATTRIBUTES[attribute_key]
            )
        for attribute_key, attribute_value in LINUX_ATTRIBUTES.items():
            self.assertEqual(attributes[attribute_key], attribute_value)
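Note that the rewritten Windows test now iterates LINUX_ATTRIBUTES even though the mocked payload is WINDOWS_JSON; if that is unintentional, the analogous assertion would be (sketch):

        for attribute_key, attribute_value in WINDOWS_ATTRIBUTES.items():
            self.assertEqual(attributes[attribute_key], attribute_value)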
@ -21,7 +21,6 @@ import subprocess
import sys

import astor
import pkg_resources
from otel_packaging import (
    get_instrumentation_packages,
    root_path,
@ -58,14 +57,12 @@ gen_path = os.path.join(
def main():
    # pylint: disable=no-member
    default_instrumentations = ast.List(elts=[])
    libraries = ast.Dict(keys=[], values=[])
    libraries = ast.List(elts=[])
    for pkg in get_instrumentation_packages():
        if not pkg["instruments"]:
            default_instrumentations.elts.append(ast.Str(pkg["requirement"]))
        for target_pkg in pkg["instruments"]:
            parsed = pkg_resources.Requirement.parse(target_pkg)
            libraries.keys.append(ast.Str(parsed.name))
            libraries.values.append(
            libraries.elts.append(
                ast.Dict(
                    keys=[ast.Str("library"), ast.Str("instrumentation")],
                    values=[ast.Str(target_pkg), ast.Str(pkg["requirement"])],
                )
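The generator now emits a list of per-library dicts instead of a name-keyed dict. A standalone sketch of the same idea using the stdlib unparser (the script itself builds ast.Str nodes and renders with astor; the flask values are illustrative):

import ast

entry = ast.Dict(
    keys=[ast.Constant("library"), ast.Constant("instrumentation")],
    values=[
        ast.Constant("flask >= 1.0"),
        ast.Constant("opentelemetry-instrumentation-flask==0.43b0.dev"),
    ],
)
libraries = ast.List(elts=[entry], ctx=ast.Load())
# renders roughly as: [{'library': 'flask >= 1.0', 'instrumentation': '...'}]
print(ast.unparse(libraries))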
@ -27,7 +27,7 @@ WORKFLOW_FILE = ".github/workflows/test.yml"
def get_sha(branch):
    url = API_URL + branch
    response = requests.get(url)
    response = requests.get(url, timeout=15)
    response.raise_for_status()
    return response.json()["sha"]
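The explicit timeout addresses pylint's missing-timeout warning: requests.get has no default timeout, so a stalled GitHub API call would hang the script indefinitely. A sketch of the same call with the failure mode handled (helper name, timeout value, and message are illustrative):

import requests

def get_sha_checked(url: str) -> str:
    try:
        # fail fast instead of blocking forever on an unresponsive endpoint
        response = requests.get(url, timeout=15)
        response.raise_for_status()
    except requests.Timeout as exc:
        raise RuntimeError(f"GitHub API request timed out: {url}") from exc
    return response.json()["sha"]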
@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint:disable=no-name-in-module

from opentelemetry.sdk.extension.aws.resource._lambda import (
    AwsLambdaResourceDetector,
@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint:disable=no-name-in-module

from opentelemetry.sdk.extension.aws.trace.aws_xray_id_generator import (
    AwsXRayIdGenerator,
@ -77,8 +77,7 @@ class AwsXRayIdGenerator(IdGenerator):
    def generate_span_id(self) -> int:
        return self.random_id_generator.generate_span_id()

    @staticmethod
    def generate_trace_id() -> int:
    def generate_trace_id(self) -> int:
        trace_time = int(time.time())
        trace_identifier = random.getrandbits(96)
        return (trace_time << 96) + trace_identifier
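Dropping @staticmethod turns generate_trace_id back into an instance method, presumably so its signature matches the IdGenerator interface; the ID layout itself is unchanged: the top 32 bits carry the epoch-second timestamp X-Ray requires and the low 96 bits are random. A small worked check of that packing:

import random
import time

trace_time = int(time.time())            # 32-bit epoch seconds
trace_identifier = random.getrandbits(96)
trace_id = (trace_time << 96) + trace_identifier

# the first 8 hex characters of the 128-bit ID encode the start time
assert trace_id >> 96 == trace_time
assert f"{trace_id:032x}"[:8] == f"{trace_time:08x}"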
@ -16,6 +16,7 @@ import datetime
import time
import unittest

# pylint: disable=no-name-in-module
from opentelemetry.sdk.extension.aws.trace import AwsXRayIdGenerator
from opentelemetry.trace.span import INVALID_TRACE_ID
27
tox.ini
@ -507,7 +507,7 @@ commands =
    sphinx-build -E -a -W -b html -T . _build/html

[testenv:spellcheck]
basepython: python3.10
basepython: python3
recreate = True
deps =
    codespell
@ -516,17 +516,10 @@ commands =
    codespell

[testenv:lint]
basepython: python3.9
recreate = False
basepython: python3
recreate = True
deps =
    -c dev-requirements.txt
    flaky
    pylint
    flake8
    isort
    black
    readme_renderer
    httpretty
    -r dev-requirements.txt

commands_pre =
    python -m pip install "{env:CORE_REPO}#egg=opentelemetry-api&subdirectory=opentelemetry-api"
@ -551,7 +544,7 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-celery[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pika[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-aio-pika[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-sklearn[test]
    ; python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-sklearn[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-redis[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-remoulade[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-fastapi[test]
@ -570,6 +563,8 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-urllib[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-urllib3[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pymysql[test]
    # prerequisite: follow the instructions here https://github.com/PyMySQL/mysqlclient#install
    # for your OS to install the required dependencies
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-mysqlclient[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-pymongo[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-elasticsearch[test]
@ -581,7 +576,7 @@ commands_pre =
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-aws-lambda[test]
    python -m pip install -e {toxinidir}/instrumentation/opentelemetry-instrumentation-system-metrics[test]
    python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-richconsole[test]
    python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-prometheus-remote-write[test]
    # requires snappy headers to be available on the system
    python -m pip install -e {toxinidir}/sdk-extension/opentelemetry-sdk-extension-aws[test]
    python -m pip install -e {toxinidir}/resource/opentelemetry-resource-detector-container[test]
    python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-aws-xray[test]
@ -592,7 +587,7 @@ commands =
    python scripts/eachdist.py lint --check-only

[testenv:docker-tests]
basepython: python3.10
basepython: python3
deps =
    pip >= 20.3.3
    pytest
@ -601,6 +596,9 @@ deps =
    mysql-connector-python ~= 8.0
    pymongo >= 3.1, < 5.0
    PyMySQL ~= 0.10.1
    # prerequisite: install libpq-dev (debian) or postgresql-devel (rhel), postgresql (mac)
    # see https://www.psycopg.org/docs/install.html#build-prerequisites
    # you might have to install additional packages depending on your OS
    psycopg2 ~= 2.9.5
    aiopg >= 0.13.0, < 1.3.0
    sqlalchemy ~= 1.4
@ -608,6 +606,7 @@ deps =
    celery[pytest] >= 4.0, < 6.0
    protobuf~=3.13
    requests==2.25.0
    # prerequisite: install unixodbc
    pyodbc~=4.0.30
    flaky==3.7.0
    remoulade>=0.50
@ -78,7 +78,7 @@ def trysetip(conn: http.client.HTTPConnection, loglevel=logging.DEBUG) -> bool:
    state = _getstate()
    if not state:
        return True
    spanlist = state.get("need_ip")  # type: typing.List[Span]
    spanlist: typing.List[Span] = state.get("need_ip")
    if not spanlist:
        return True
@ -88,7 +88,7 @@ def trysetip(conn: http.client.HTTPConnection, loglevel=logging.DEBUG) -> bool:
    sock = "<property not accessed>"
    try:
        sock = conn.sock  # type: typing.Optional[socket.socket]
        sock: typing.Optional[socket.socket] = conn.sock
        logger.debug("Got socket: %s", sock)
        if sock is None:
            return False
@ -163,7 +163,7 @@ def set_ip_on_next_http_connection(span: Span):
        finally:
            context.detach(token)
    else:
        spans = state["need_ip"]  # type: typing.List[Span]
        spans: typing.List[Span] = state["need_ip"]
        spans.append(span)
        try:
            yield
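These three hunks are the same mechanical change: legacy # type: comments become PEP 526 variable annotations, the modern spelling the linters and type checkers prefer. A minimal sketch of the pattern (names illustrative):

import typing

# old style: a type comment, invisible at runtime
need_ip = []  # type: typing.List[str]

# new style: an inline annotation on the assignment
need_ip_annotated: typing.List[str] = []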