From d1904b9529fc096b9ad8c13febf26f0876f7cef4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Em=C3=ADdio=20Neto?= <9735060+emdneto@users.noreply.github.com> Date: Mon, 14 Oct 2024 14:27:50 -0300 Subject: [PATCH] Add support to protobuf5 for opentelemetry-proto and regenerate proto files (#4206) --- .github/workflows/lint_0.yml | 6 +- .github/workflows/misc_0.yml | 12 +- .github/workflows/test_0.yml | 1599 +++++------------ CHANGELOG.md | 2 + docs-requirements.txt | 5 +- .../test-requirements-1.txt | 19 - ...quirements-0.txt => test-requirements.txt} | 2 +- .../pyproject.toml | 2 +- .../test-requirements-1.txt | 22 - ...quirements-0.txt => test-requirements.txt} | 6 +- .../tests/logs/test_otlp_logs_exporter.py | 4 +- .../tests/test_otlp_exporter_mixin.py | 4 +- .../tests/test_otlp_metrics_exporter.py | 4 +- .../tests/test_otlp_trace_exporter.py | 4 +- .../test-requirements-1.txt | 28 - ...quirements-0.txt => test-requirements.txt} | 4 +- .../test-requirements.txt | 1 - gen-requirements.txt | 11 +- opentelemetry-proto/pyproject.toml | 2 +- .../collector/logs/v1/logs_service_pb2.py | 78 +- .../collector/logs/v1/logs_service_pb2.pyi | 115 +- .../logs/v1/logs_service_pb2_grpc.py | 98 +- .../metrics/v1/metrics_service_pb2.py | 78 +- .../metrics/v1/metrics_service_pb2.pyi | 115 +- .../metrics/v1/metrics_service_pb2_grpc.py | 98 +- .../collector/trace/v1/trace_service_pb2.py | 78 +- .../collector/trace/v1/trace_service_pb2.pyi | 115 +- .../trace/v1/trace_service_pb2_grpc.py | 98 +- .../proto/common/v1/common_pb2.py | 89 +- .../proto/common/v1/common_pb2.pyi | 244 ++- .../opentelemetry/proto/logs/v1/logs_pb2.py | 127 +- .../opentelemetry/proto/logs/v1/logs_pb2.pyi | 419 +++-- .../proto/metrics/v1/metrics_pb2.py | 251 +-- .../proto/metrics/v1/metrics_pb2.pyi | 1147 ++++++++---- .../proto/resource/v1/resource_pb2.py | 41 +- .../proto/resource/v1/resource_pb2.pyi | 65 +- .../opentelemetry/proto/trace/v1/trace_pb2.py | 146 +- .../proto/trace/v1/trace_pb2.pyi | 603 ++++--- 
opentelemetry-proto/test-requirements-1.txt | 14 - ...quirements-0.txt => test-requirements.txt} | 2 +- tox.ini | 104 +- 41 files changed, 3064 insertions(+), 2798 deletions(-) delete mode 100644 exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt rename exporter/opentelemetry-exporter-otlp-proto-common/{test-requirements-0.txt => test-requirements.txt} (95%) delete mode 100644 exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt rename exporter/opentelemetry-exporter-otlp-proto-grpc/{test-requirements-0.txt => test-requirements.txt} (87%) delete mode 100644 exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt rename exporter/opentelemetry-exporter-otlp-proto-http/{test-requirements-0.txt => test-requirements.txt} (91%) delete mode 100644 opentelemetry-proto/test-requirements-1.txt rename opentelemetry-proto/{test-requirements-0.txt => test-requirements.txt} (93%) diff --git a/.github/workflows/lint_0.yml b/.github/workflows/lint_0.yml index 00346652985..71b148c02d5 100644 --- a/.github/workflows/lint_0.yml +++ b/.github/workflows/lint_0.yml @@ -34,8 +34,8 @@ jobs: - name: Run tests run: tox -e lint-opentelemetry-api - lint-opentelemetry-proto: - name: opentelemetry-proto + lint-opentelemetry-proto-protobuf5: + name: opentelemetry-proto-protobuf5 runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -50,7 +50,7 @@ jobs: run: pip install tox - name: Run tests - run: tox -e lint-opentelemetry-proto + run: tox -e lint-opentelemetry-proto-protobuf5 lint-opentelemetry-sdk: name: opentelemetry-sdk diff --git a/.github/workflows/misc_0.yml b/.github/workflows/misc_0.yml index fbb06fd4743..c94e44fe9a6 100644 --- a/.github/workflows/misc_0.yml +++ b/.github/workflows/misc_0.yml @@ -124,8 +124,8 @@ jobs: - name: Run tests run: tox -e docs - docker-tests-proto3: - name: docker-tests-proto3 + docker-tests-otlpexporter: + name: docker-tests-otlpexporter runs-on: ubuntu-latest steps: - name: 
Checkout repo @ SHA - ${{ github.sha }} @@ -140,10 +140,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e docker-tests-proto3 + run: tox -e docker-tests-otlpexporter - docker-tests-proto4: - name: docker-tests-proto4 + docker-tests-opencensus: + name: docker-tests-opencensus runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -158,7 +158,7 @@ jobs: run: pip install tox - name: Run tests - run: tox -e docker-tests-proto4 + run: tox -e docker-tests-opencensus public-symbols-check: name: public-symbols-check diff --git a/.github/workflows/test_0.yml b/.github/workflows/test_0.yml index 48c751e7908..dfbced1b397 100644 --- a/.github/workflows/test_0.yml +++ b/.github/workflows/test_0.yml @@ -124,8 +124,8 @@ jobs: - name: Run tests run: tox -e pypy3-test-opentelemetry-api -- -ra - py38-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 3.8 Ubuntu + py38-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -140,46 +140,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-proto-0 -- -ra + run: tox -e py38-test-opentelemetry-proto-protobuf5 -- -ra - py38-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py38-test-opentelemetry-proto-1 -- -ra - - py39-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 3.9 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.9 - uses: actions/setup-python@v5 - with: - python-version: "3.9" 
- - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py39-test-opentelemetry-proto-0 -- -ra - - py39-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 3.9 Ubuntu + py39-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -194,28 +158,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-proto-1 -- -ra - - py310-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 3.10 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py310-test-opentelemetry-proto-0 -- -ra + run: tox -e py39-test-opentelemetry-proto-protobuf5 -- -ra - py310-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 3.10 Ubuntu + py310-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -230,28 +176,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-proto-1 -- -ra - - py311-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-proto-0 -- -ra + run: tox -e py310-test-opentelemetry-proto-protobuf5 -- -ra - py311-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 3.11 Ubuntu + 
py311-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -266,28 +194,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-proto-1 -- -ra - - py312-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 3.12 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py312-test-opentelemetry-proto-0 -- -ra + run: tox -e py311-test-opentelemetry-proto-protobuf5 -- -ra - py312-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 3.12 Ubuntu + py312-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -302,28 +212,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-proto-1 -- -ra - - pypy3-test-opentelemetry-proto-0_ubuntu-latest: - name: opentelemetry-proto-0 pypy-3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-proto-0 -- -ra + run: tox -e py312-test-opentelemetry-proto-protobuf5 -- -ra - pypy3-test-opentelemetry-proto-1_ubuntu-latest: - name: opentelemetry-proto-1 pypy-3.8 Ubuntu + pypy3-test-opentelemetry-proto-protobuf5_ubuntu-latest: + name: opentelemetry-proto-protobuf5 pypy-3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -338,7 +230,7 @@ 
jobs: run: pip install tox - name: Run tests - run: tox -e pypy3-test-opentelemetry-proto-1 -- -ra + run: tox -e pypy3-test-opentelemetry-proto-protobuf5 -- -ra py38-test-opentelemetry-sdk_ubuntu-latest: name: opentelemetry-sdk 3.8 Ubuntu @@ -934,26 +826,8 @@ jobs: - name: Run tests run: tox -e py312-test-opentelemetry-exporter-opencensus -- -ra - py38-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra - - py38-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.8 Ubuntu + py38-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -968,10 +842,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common -- -ra - py39-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.9 Ubuntu + py39-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -986,172 +860,172 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common -- -ra - 
py39-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.9 Ubuntu + py310-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common -- -ra - py310-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.10 Ubuntu + py311-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.11" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common -- -ra - py310-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.10 Ubuntu + py312-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" - name: Install tox run: pip install tox - name: Run 
tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common -- -ra - py311-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.11 Ubuntu + pypy3-test-opentelemetry-exporter-otlp-proto-common_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-common pypy-3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "pypy-3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common -- -ra - py311-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.11 Ubuntu + py38-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: + name: opentelemetry-exporter-otlp-combined 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-combined -- -ra - py312-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.12 Ubuntu + py39-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: + name: opentelemetry-exporter-otlp-combined 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 
3.9 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.9" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py39-test-opentelemetry-exporter-otlp-combined -- -ra - py312-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.12 Ubuntu + py310-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: + name: opentelemetry-exporter-otlp-combined 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-combined -- -ra - pypy3-test-opentelemetry-exporter-otlp-proto-common-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-0 pypy-3.8 Ubuntu + py311-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: + name: opentelemetry-exporter-otlp-combined 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python pypy-3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "pypy-3.8" + python-version: "3.11" - name: Install tox run: pip install tox - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-combined -- -ra - pypy3-test-opentelemetry-exporter-otlp-proto-common-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-common-1 pypy-3.8 Ubuntu + py312-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: + name: opentelemetry-exporter-otlp-combined 3.12 Ubuntu runs-on: ubuntu-latest 
steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python pypy-3.8 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "pypy-3.8" + python-version: "3.12" - name: Install tox run: pip install tox - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-combined -- -ra - py38-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: - name: opentelemetry-exporter-otlp-combined 3.8 Ubuntu + py38-test-opentelemetry-exporter-otlp-proto-grpc_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1166,10 +1040,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-combined -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py39-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: - name: opentelemetry-exporter-otlp-combined 3.9 Ubuntu + py39-test-opentelemetry-exporter-otlp-proto-grpc_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1184,10 +1058,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-combined -- -ra + run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py310-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: - name: opentelemetry-exporter-otlp-combined 3.10 Ubuntu + py310-test-opentelemetry-exporter-otlp-proto-grpc_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1202,10 +1076,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-combined -- -ra + run: tox -e 
py310-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py311-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: - name: opentelemetry-exporter-otlp-combined 3.11 Ubuntu + py311-test-opentelemetry-exporter-otlp-proto-grpc_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1220,10 +1094,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-combined -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py312-test-opentelemetry-exporter-otlp-combined_ubuntu-latest: - name: opentelemetry-exporter-otlp-combined 3.12 Ubuntu + py312-test-opentelemetry-exporter-otlp-proto-grpc_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1238,10 +1112,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-combined -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py38-test-opentelemetry-exporter-otlp-proto-grpc-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.8 Ubuntu + py38-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-http 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1256,226 +1130,226 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http -- -ra - py38-test-opentelemetry-exporter-otlp-proto-grpc-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.8 Ubuntu + py39-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-http 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} 
uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http -- -ra - py39-test-opentelemetry-exporter-otlp-proto-grpc-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.9 Ubuntu + py310-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-http 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http -- -ra - py39-test-opentelemetry-exporter-otlp-proto-grpc-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.9 Ubuntu + py311-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-http 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.11" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http -- -ra - py310-test-opentelemetry-exporter-otlp-proto-grpc-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.10 Ubuntu + py312-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: 
opentelemetry-exporter-otlp-proto-http 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http -- -ra - py310-test-opentelemetry-exporter-otlp-proto-grpc-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.10 Ubuntu + pypy3-test-opentelemetry-exporter-otlp-proto-http_ubuntu-latest: + name: opentelemetry-exporter-otlp-proto-http pypy-3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "pypy-3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http -- -ra - py311-test-opentelemetry-exporter-otlp-proto-grpc-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.11 Ubuntu + py38-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-prometheus -- -ra - py311-test-opentelemetry-exporter-otlp-proto-grpc-1_ubuntu-latest: - name: 
opentelemetry-exporter-otlp-proto-grpc-1 3.11 Ubuntu + py39-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.9" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py39-test-opentelemetry-exporter-prometheus -- -ra - py312-test-opentelemetry-exporter-otlp-proto-grpc-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.12 Ubuntu + py310-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-prometheus -- -ra - py312-test-opentelemetry-exporter-otlp-proto-grpc-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.12 Ubuntu + py311-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.11" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-prometheus -- -ra - 
py38-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.8 Ubuntu + py312-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.12" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-prometheus -- -ra - py38-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.8 Ubuntu + pypy3-test-opentelemetry-exporter-prometheus_ubuntu-latest: + name: opentelemetry-exporter-prometheus pypy-3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "pypy-3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-prometheus -- -ra - py39-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.9 Ubuntu + py38-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: + name: opentelemetry-exporter-zipkin-combined 3.8 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.8" - name: Install tox run: pip install tox - name: Run tests - run: tox -e 
py39-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-zipkin-combined -- -ra - py39-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.9 Ubuntu + py39-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: + name: opentelemetry-exporter-zipkin-combined 3.9 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1490,10 +1364,10 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py39-test-opentelemetry-exporter-zipkin-combined -- -ra - py310-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.10 Ubuntu + py310-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: + name: opentelemetry-exporter-zipkin-combined 3.10 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -1508,316 +1382,28 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-zipkin-combined -- -ra - py310-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.10 Ubuntu + py311-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: + name: opentelemetry-exporter-zipkin-combined 3.11 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.11" - name: Install tox run: pip install tox - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-zipkin-combined -- -ra - py311-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - 
name: opentelemetry-exporter-otlp-proto-http-0 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra - - py311-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra - - py312-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.12 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra - - py312-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.12 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra - - pypy3-test-opentelemetry-exporter-otlp-proto-http-0_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-0 pypy-3.8 Ubuntu - 
runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra - - pypy3-test-opentelemetry-exporter-otlp-proto-http-1_ubuntu-latest: - name: opentelemetry-exporter-otlp-proto-http-1 pypy-3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra - - py38-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus 3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-prometheus -- -ra - - py39-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus 3.9 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.9 - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-prometheus -- -ra - - py310-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus 3.10 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - 
name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-prometheus -- -ra - - py311-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-prometheus -- -ra - - py312-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus 3.12 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-prometheus -- -ra - - pypy3-test-opentelemetry-exporter-prometheus_ubuntu-latest: - name: opentelemetry-exporter-prometheus pypy-3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-prometheus -- -ra - - py38-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: - name: opentelemetry-exporter-zipkin-combined 3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox 
- - - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-zipkin-combined -- -ra - - py39-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: - name: opentelemetry-exporter-zipkin-combined 3.9 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.9 - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-zipkin-combined -- -ra - - py310-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: - name: opentelemetry-exporter-zipkin-combined 3.10 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-zipkin-combined -- -ra - - py311-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: - name: opentelemetry-exporter-zipkin-combined 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-zipkin-combined -- -ra - - py312-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: - name: opentelemetry-exporter-zipkin-combined 3.12 Ubuntu + py312-test-opentelemetry-exporter-zipkin-combined_ubuntu-latest: + name: opentelemetry-exporter-zipkin-combined 3.12 Ubuntu runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -2319,124 +1905,10 @@ jobs: - name: Run tests run: tox -e py39-test-opentelemetry-test-utils -- -ra - - 
py310-test-opentelemetry-test-utils_ubuntu-latest: - name: opentelemetry-test-utils 3.10 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py310-test-opentelemetry-test-utils -- -ra - - py311-test-opentelemetry-test-utils_ubuntu-latest: - name: opentelemetry-test-utils 3.11 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py311-test-opentelemetry-test-utils -- -ra - - py312-test-opentelemetry-test-utils_ubuntu-latest: - name: opentelemetry-test-utils 3.12 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py312-test-opentelemetry-test-utils -- -ra - - pypy3-test-opentelemetry-test-utils_ubuntu-latest: - name: opentelemetry-test-utils pypy-3.8 Ubuntu - runs-on: ubuntu-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-test-utils -- -ra - - py38-test-opentelemetry-api_windows-latest: - name: opentelemetry-api 3.8 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: 
actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py38-test-opentelemetry-api -- -ra - - py39-test-opentelemetry-api_windows-latest: - name: opentelemetry-api 3.9 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.9 - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py39-test-opentelemetry-api -- -ra - - py310-test-opentelemetry-api_windows-latest: - name: opentelemetry-api 3.10 Windows - runs-on: windows-latest + + py310-test-opentelemetry-test-utils_ubuntu-latest: + name: opentelemetry-test-utils 3.10 Ubuntu + runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 @@ -2449,15 +1921,12 @@ jobs: - name: Install tox run: pip install tox - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - name: Run tests - run: tox -e py310-test-opentelemetry-api -- -ra + run: tox -e py310-test-opentelemetry-test-utils -- -ra - py311-test-opentelemetry-api_windows-latest: - name: opentelemetry-api 3.11 Windows - runs-on: windows-latest + py311-test-opentelemetry-test-utils_ubuntu-latest: + name: opentelemetry-test-utils 3.11 Ubuntu + runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 @@ -2470,15 +1939,12 @@ jobs: - name: Install tox run: pip install tox - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - name: Run tests - run: tox -e py311-test-opentelemetry-api -- -ra + run: tox -e 
py311-test-opentelemetry-test-utils -- -ra - py312-test-opentelemetry-api_windows-latest: - name: opentelemetry-api 3.12 Windows - runs-on: windows-latest + py312-test-opentelemetry-test-utils_ubuntu-latest: + name: opentelemetry-test-utils 3.12 Ubuntu + runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 @@ -2491,15 +1957,12 @@ jobs: - name: Install tox run: pip install tox - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - name: Run tests - run: tox -e py312-test-opentelemetry-api -- -ra + run: tox -e py312-test-opentelemetry-test-utils -- -ra - pypy3-test-opentelemetry-api_windows-latest: - name: opentelemetry-api pypy-3.8 Windows - runs-on: windows-latest + pypy3-test-opentelemetry-test-utils_ubuntu-latest: + name: opentelemetry-test-utils pypy-3.8 Ubuntu + runs-on: ubuntu-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 @@ -2512,14 +1975,11 @@ jobs: - name: Install tox run: pip install tox - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - name: Run tests - run: tox -e pypy3-test-opentelemetry-api -- -ra + run: tox -e pypy3-test-opentelemetry-test-utils -- -ra - py38-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 3.8 Windows + py38-test-opentelemetry-api_windows-latest: + name: opentelemetry-api 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -2537,19 +1997,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-proto-0 -- -ra + run: tox -e py38-test-opentelemetry-api -- -ra - py38-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 3.8 Windows + py39-test-opentelemetry-api_windows-latest: + name: opentelemetry-api 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: 
actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install tox run: pip install tox @@ -2558,19 +2018,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-proto-1 -- -ra + run: tox -e py39-test-opentelemetry-api -- -ra - py39-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 3.9 Windows + py310-test-opentelemetry-api_windows-latest: + name: opentelemetry-api 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install tox run: pip install tox @@ -2579,19 +2039,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-proto-0 -- -ra + run: tox -e py310-test-opentelemetry-api -- -ra - py39-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 3.9 Windows + py311-test-opentelemetry-api_windows-latest: + name: opentelemetry-api 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.11" - name: Install tox run: pip install tox @@ -2600,19 +2060,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-proto-1 -- -ra + run: tox -e py311-test-opentelemetry-api -- -ra - py310-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 3.10 Windows + py312-test-opentelemetry-api_windows-latest: + name: opentelemetry-api 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: 
actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" - name: Install tox run: pip install tox @@ -2621,19 +2081,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-proto-0 -- -ra + run: tox -e py312-test-opentelemetry-api -- -ra - py310-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 3.10 Windows + pypy3-test-opentelemetry-api_windows-latest: + name: opentelemetry-api pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "pypy-3.8" - name: Install tox run: pip install tox @@ -2642,19 +2102,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-proto-1 -- -ra + run: tox -e pypy3-test-opentelemetry-api -- -ra - py311-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 3.11 Windows + py38-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.8" - name: Install tox run: pip install tox @@ -2663,19 +2123,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-proto-0 -- -ra + run: tox -e py38-test-opentelemetry-proto-protobuf5 -- -ra - py311-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 3.11 Windows + py39-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 3.9 Windows runs-on: 
windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.9" - name: Install tox run: pip install tox @@ -2684,19 +2144,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-proto-1 -- -ra + run: tox -e py39-test-opentelemetry-proto-protobuf5 -- -ra - py312-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 3.12 Windows + py310-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install tox run: pip install tox @@ -2705,19 +2165,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-proto-0 -- -ra + run: tox -e py310-test-opentelemetry-proto-protobuf5 -- -ra - py312-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 3.12 Windows + py311-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.11" - name: Install tox run: pip install tox @@ -2726,19 +2186,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-proto-1 -- -ra + run: tox -e py311-test-opentelemetry-proto-protobuf5 -- -ra - pypy3-test-opentelemetry-proto-0_windows-latest: - name: opentelemetry-proto-0 pypy-3.8 Windows + 
py312-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python pypy-3.8 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "pypy-3.8" + python-version: "3.12" - name: Install tox run: pip install tox @@ -2747,10 +2207,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-proto-0 -- -ra + run: tox -e py312-test-opentelemetry-proto-protobuf5 -- -ra - pypy3-test-opentelemetry-proto-1_windows-latest: - name: opentelemetry-proto-1 pypy-3.8 Windows + pypy3-test-opentelemetry-proto-protobuf5_windows-latest: + name: opentelemetry-proto-protobuf5 pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -2768,7 +2228,7 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-proto-1 -- -ra + run: tox -e pypy3-test-opentelemetry-proto-protobuf5 -- -ra py38-test-opentelemetry-sdk_windows-latest: name: opentelemetry-sdk 3.8 Windows @@ -3463,29 +2923,8 @@ jobs: - name: Run tests run: tox -e py312-test-opentelemetry-exporter-opencensus -- -ra - py38-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.8 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.8 - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra - - py38-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: 
opentelemetry-exporter-otlp-proto-common-1 3.8 Windows + py38-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3503,31 +2942,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra - - py39-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.9 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.9 - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-common -- -ra - py39-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.9 Windows + py39-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3545,31 +2963,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra - - py310-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.10 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install tox - run: pip install tox - - - 
name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common -- -ra - py310-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.10 Windows + py310-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3587,31 +2984,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra - - py311-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.11 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common -- -ra - py311-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.11 Windows + py311-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3629,31 +3005,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra - - 
py312-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 3.12 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common -- -ra - py312-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-1 3.12 Windows + py312-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3671,31 +3026,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra - - pypy3-test-opentelemetry-exporter-otlp-proto-common-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-0 pypy-3.8 Windows - runs-on: windows-latest - steps: - - name: Checkout repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - name: Set up Python pypy-3.8 - uses: actions/setup-python@v5 - with: - python-version: "pypy-3.8" - - - name: Install tox - run: pip install tox - - - name: Configure git to support long filenames - run: git config --system core.longpaths true - - - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common -- -ra - pypy3-test-opentelemetry-exporter-otlp-proto-common-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-common-1 pypy-3.8 Windows + 
pypy3-test-opentelemetry-exporter-otlp-proto-common_windows-latest: + name: opentelemetry-exporter-otlp-proto-common pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3713,7 +3047,7 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common -- -ra py38-test-opentelemetry-exporter-otlp-combined_windows-latest: name: opentelemetry-exporter-otlp-combined 3.8 Windows @@ -3820,8 +3154,8 @@ jobs: - name: Run tests run: tox -e py312-test-opentelemetry-exporter-otlp-combined -- -ra - py38-test-opentelemetry-exporter-otlp-proto-grpc-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.8 Windows + py38-test-opentelemetry-exporter-otlp-proto-grpc_windows-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3839,19 +3173,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py38-test-opentelemetry-exporter-otlp-proto-grpc-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.8 Windows + py39-test-opentelemetry-exporter-otlp-proto-grpc_windows-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install tox run: pip install tox @@ -3860,19 +3194,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e 
py39-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py39-test-opentelemetry-exporter-otlp-proto-grpc-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.9 Windows + py310-test-opentelemetry-exporter-otlp-proto-grpc_windows-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install tox run: pip install tox @@ -3881,19 +3215,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py39-test-opentelemetry-exporter-otlp-proto-grpc-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.9 Windows + py311-test-opentelemetry-exporter-otlp-proto-grpc_windows-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.11" - name: Install tox run: pip install tox @@ -3902,19 +3236,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py310-test-opentelemetry-exporter-otlp-proto-grpc-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.10 Windows + py312-test-opentelemetry-exporter-otlp-proto-grpc_windows-latest: + name: opentelemetry-exporter-otlp-proto-grpc 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set 
up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" - name: Install tox run: pip install tox @@ -3923,19 +3257,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc -- -ra - py310-test-opentelemetry-exporter-otlp-proto-grpc-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.10 Windows + py38-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: opentelemetry-exporter-otlp-proto-http 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.8" - name: Install tox run: pip install tox @@ -3944,19 +3278,40 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http -- -ra - py311-test-opentelemetry-exporter-otlp-proto-grpc-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.11 Windows + py39-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: opentelemetry-exporter-otlp-proto-http 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.9" + + - name: Install tox + run: pip install tox + + - name: Configure git to support long filenames + run: git config --system core.longpaths true + + - name: Run tests + run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http -- -ra + + 
py310-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: opentelemetry-exporter-otlp-proto-http 3.10 Windows + runs-on: windows-latest + steps: + - name: Checkout repo @ SHA - ${{ github.sha }} + uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" - name: Install tox run: pip install tox @@ -3965,10 +3320,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http -- -ra - py311-test-opentelemetry-exporter-otlp-proto-grpc-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.11 Windows + py311-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: opentelemetry-exporter-otlp-proto-http 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -3986,10 +3341,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http -- -ra - py312-test-opentelemetry-exporter-otlp-proto-grpc-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-0 3.12 Windows + py312-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: opentelemetry-exporter-otlp-proto-http 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4007,19 +3362,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http -- -ra - py312-test-opentelemetry-exporter-otlp-proto-grpc-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-grpc-1 3.12 Windows + pypy3-test-opentelemetry-exporter-otlp-proto-http_windows-latest: + name: 
opentelemetry-exporter-otlp-proto-http pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "pypy-3.8" - name: Install tox run: pip install tox @@ -4028,10 +3383,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http -- -ra - py38-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.8 Windows + py38-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4049,19 +3404,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-prometheus -- -ra - py38-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.8 Windows + py39-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install tox run: pip install tox @@ -4070,19 +3425,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py39-test-opentelemetry-exporter-prometheus -- -ra - py39-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: 
opentelemetry-exporter-otlp-proto-http-0 3.9 Windows + py310-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install tox run: pip install tox @@ -4091,19 +3446,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-prometheus -- -ra - py39-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.9 Windows + py311-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.11" - name: Install tox run: pip install tox @@ -4112,19 +3467,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-prometheus -- -ra - py310-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.10 Windows + py312-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" - name: Install tox run: pip install tox @@ 
-4133,19 +3488,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-prometheus -- -ra - py310-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.10 Windows + pypy3-test-opentelemetry-exporter-prometheus_windows-latest: + name: opentelemetry-exporter-prometheus pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python pypy-3.8 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "pypy-3.8" - name: Install tox run: pip install tox @@ -4154,19 +3509,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-prometheus -- -ra - py311-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.11 Windows + py38-test-opentelemetry-exporter-zipkin-combined_windows-latest: + name: opentelemetry-exporter-zipkin-combined 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.8 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.8" - name: Install tox run: pip install tox @@ -4175,19 +3530,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py38-test-opentelemetry-exporter-zipkin-combined -- -ra - py311-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.11 Windows + py39-test-opentelemetry-exporter-zipkin-combined_windows-latest: + 
name: opentelemetry-exporter-zipkin-combined 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.9 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.9" - name: Install tox run: pip install tox @@ -4196,19 +3551,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py39-test-opentelemetry-exporter-zipkin-combined -- -ra - py312-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-0 3.12 Windows + py310-test-opentelemetry-exporter-zipkin-combined_windows-latest: + name: opentelemetry-exporter-zipkin-combined 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.10 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.10" - name: Install tox run: pip install tox @@ -4217,19 +3572,19 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py310-test-opentelemetry-exporter-zipkin-combined -- -ra - py312-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 3.12 Windows + py311-test-opentelemetry-exporter-zipkin-combined_windows-latest: + name: opentelemetry-exporter-zipkin-combined 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.11" - name: Install tox run: pip install tox @@ -4238,19 +3593,19 @@ jobs: run: git config --system core.longpaths true - 
name: Run tests - run: tox -e py312-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e py311-test-opentelemetry-exporter-zipkin-combined -- -ra - pypy3-test-opentelemetry-exporter-otlp-proto-http-0_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-0 pypy-3.8 Windows + py312-test-opentelemetry-exporter-zipkin-combined_windows-latest: + name: opentelemetry-exporter-zipkin-combined 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} uses: actions/checkout@v4 - - name: Set up Python pypy-3.8 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "pypy-3.8" + python-version: "3.12" - name: Install tox run: pip install tox @@ -4259,10 +3614,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http-0 -- -ra + run: tox -e py312-test-opentelemetry-exporter-zipkin-combined -- -ra - pypy3-test-opentelemetry-exporter-otlp-proto-http-1_windows-latest: - name: opentelemetry-exporter-otlp-proto-http-1 pypy-3.8 Windows + pypy3-test-opentelemetry-exporter-zipkin-combined_windows-latest: + name: opentelemetry-exporter-zipkin-combined pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4280,10 +3635,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-http-1 -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-zipkin-combined -- -ra - py38-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus 3.8 Windows + py38-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: opentelemetry-exporter-zipkin-proto-http 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4301,10 +3656,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e 
py38-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e py38-test-opentelemetry-exporter-zipkin-proto-http -- -ra - py39-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus 3.9 Windows + py39-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: opentelemetry-exporter-zipkin-proto-http 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4322,10 +3677,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e py39-test-opentelemetry-exporter-zipkin-proto-http -- -ra - py310-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus 3.10 Windows + py310-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: opentelemetry-exporter-zipkin-proto-http 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4343,10 +3698,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e py310-test-opentelemetry-exporter-zipkin-proto-http -- -ra - py311-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus 3.11 Windows + py311-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: opentelemetry-exporter-zipkin-proto-http 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4364,10 +3719,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e py311-test-opentelemetry-exporter-zipkin-proto-http -- -ra - py312-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus 3.12 Windows + py312-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: 
opentelemetry-exporter-zipkin-proto-http 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4385,10 +3740,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e py312-test-opentelemetry-exporter-zipkin-proto-http -- -ra - pypy3-test-opentelemetry-exporter-prometheus_windows-latest: - name: opentelemetry-exporter-prometheus pypy-3.8 Windows + pypy3-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: + name: opentelemetry-exporter-zipkin-proto-http pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4406,10 +3761,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-prometheus -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-zipkin-proto-http -- -ra - py38-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined 3.8 Windows + py38-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4427,10 +3782,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e py38-test-opentelemetry-exporter-zipkin-json -- -ra - py39-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined 3.9 Windows + py39-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4448,10 +3803,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e 
py39-test-opentelemetry-exporter-zipkin-json -- -ra - py310-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined 3.10 Windows + py310-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4469,10 +3824,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e py310-test-opentelemetry-exporter-zipkin-json -- -ra - py311-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined 3.11 Windows + py311-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4490,10 +3845,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e py311-test-opentelemetry-exporter-zipkin-json -- -ra - py312-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined 3.12 Windows + py312-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4511,10 +3866,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e py312-test-opentelemetry-exporter-zipkin-json -- -ra - pypy3-test-opentelemetry-exporter-zipkin-combined_windows-latest: - name: opentelemetry-exporter-zipkin-combined pypy-3.8 Windows + pypy3-test-opentelemetry-exporter-zipkin-json_windows-latest: + name: opentelemetry-exporter-zipkin-json pypy-3.8 Windows runs-on: 
windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4532,10 +3887,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-zipkin-combined -- -ra + run: tox -e pypy3-test-opentelemetry-exporter-zipkin-json -- -ra - py38-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: opentelemetry-exporter-zipkin-proto-http 3.8 Windows + py38-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4553,10 +3908,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e py38-test-opentelemetry-propagator-b3 -- -ra - py39-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: opentelemetry-exporter-zipkin-proto-http 3.9 Windows + py39-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4574,10 +3929,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e py39-test-opentelemetry-propagator-b3 -- -ra - py310-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: opentelemetry-exporter-zipkin-proto-http 3.10 Windows + py310-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4595,10 +3950,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e py310-test-opentelemetry-propagator-b3 -- -ra - py311-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: 
opentelemetry-exporter-zipkin-proto-http 3.11 Windows + py311-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4616,10 +3971,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e py311-test-opentelemetry-propagator-b3 -- -ra - py312-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: opentelemetry-exporter-zipkin-proto-http 3.12 Windows + py312-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 3.12 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4637,10 +3992,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e py312-test-opentelemetry-propagator-b3 -- -ra - pypy3-test-opentelemetry-exporter-zipkin-proto-http_windows-latest: - name: opentelemetry-exporter-zipkin-proto-http pypy-3.8 Windows + pypy3-test-opentelemetry-propagator-b3_windows-latest: + name: opentelemetry-propagator-b3 pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4658,10 +4013,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-zipkin-proto-http -- -ra + run: tox -e pypy3-test-opentelemetry-propagator-b3 -- -ra - py38-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json 3.8 Windows + py38-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4679,10 +4034,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e 
py38-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e py38-test-opentelemetry-propagator-jaeger -- -ra - py39-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json 3.9 Windows + py39-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4700,10 +4055,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e py39-test-opentelemetry-propagator-jaeger -- -ra - py310-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json 3.10 Windows + py310-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger 3.10 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4721,10 +4076,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e py310-test-opentelemetry-propagator-jaeger -- -ra - py311-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json 3.11 Windows + py311-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4742,10 +4097,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e py311-test-opentelemetry-propagator-jaeger -- -ra - py312-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json 3.12 Windows + py312-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger 3.12 Windows runs-on: windows-latest steps: - name: 
Checkout repo @ SHA - ${{ github.sha }} @@ -4763,10 +4118,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py312-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e py312-test-opentelemetry-propagator-jaeger -- -ra - pypy3-test-opentelemetry-exporter-zipkin-json_windows-latest: - name: opentelemetry-exporter-zipkin-json pypy-3.8 Windows + pypy3-test-opentelemetry-propagator-jaeger_windows-latest: + name: opentelemetry-propagator-jaeger pypy-3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4784,10 +4139,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e pypy3-test-opentelemetry-exporter-zipkin-json -- -ra + run: tox -e pypy3-test-opentelemetry-propagator-jaeger -- -ra - py38-test-opentelemetry-propagator-b3_windows-latest: - name: opentelemetry-propagator-b3 3.8 Windows + py38-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils 3.8 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4805,10 +4160,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py38-test-opentelemetry-propagator-b3 -- -ra + run: tox -e py38-test-opentelemetry-test-utils -- -ra - py39-test-opentelemetry-propagator-b3_windows-latest: - name: opentelemetry-propagator-b3 3.9 Windows + py39-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils 3.9 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4826,10 +4181,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py39-test-opentelemetry-propagator-b3 -- -ra + run: tox -e py39-test-opentelemetry-test-utils -- -ra - py310-test-opentelemetry-propagator-b3_windows-latest: - name: opentelemetry-propagator-b3 3.10 Windows + py310-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils 3.10 Windows 
runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4847,10 +4202,10 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py310-test-opentelemetry-propagator-b3 -- -ra + run: tox -e py310-test-opentelemetry-test-utils -- -ra - py311-test-opentelemetry-propagator-b3_windows-latest: - name: opentelemetry-propagator-b3 3.11 Windows + py311-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils 3.11 Windows runs-on: windows-latest steps: - name: Checkout repo @ SHA - ${{ github.sha }} @@ -4868,4 +4223,46 @@ jobs: run: git config --system core.longpaths true - name: Run tests - run: tox -e py311-test-opentelemetry-propagator-b3 -- -ra + run: tox -e py311-test-opentelemetry-test-utils -- -ra + + py312-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils 3.12 Windows + runs-on: windows-latest + steps: + - name: Checkout repo @ SHA - ${{ github.sha }} + uses: actions/checkout@v4 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install tox + run: pip install tox + + - name: Configure git to support long filenames + run: git config --system core.longpaths true + + - name: Run tests + run: tox -e py312-test-opentelemetry-test-utils -- -ra + + pypy3-test-opentelemetry-test-utils_windows-latest: + name: opentelemetry-test-utils pypy-3.8 Windows + runs-on: windows-latest + steps: + - name: Checkout repo @ SHA - ${{ github.sha }} + uses: actions/checkout@v4 + + - name: Set up Python pypy-3.8 + uses: actions/setup-python@v5 + with: + python-version: "pypy-3.8" + + - name: Install tox + run: pip install tox + + - name: Configure git to support long filenames + run: git config --system core.longpaths true + + - name: Run tests + run: tox -e pypy3-test-opentelemetry-test-utils -- -ra diff --git a/CHANGELOG.md b/CHANGELOG.md index e4ea8a8bf1a..72721ef626a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,8 
@@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#4176](https://github.com/open-telemetry/opentelemetry-python/pull/4176)) - Update semantic conventions to version 1.28.0 ([#4218](https://github.com/open-telemetry/opentelemetry-python/pull/4218)) +- Add support to protobuf 5+ and drop support to protobuf 3 and 4 + ([#4206](https://github.com/open-telemetry/opentelemetry-python/pull/4206)) - Update environment variable descriptions to match signal ([#4222](https://github.com/open-telemetry/opentelemetry-python/pull/4222)) diff --git a/docs-requirements.txt b/docs-requirements.txt index 799a10cb8e8..2a8d99844f2 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -25,7 +25,4 @@ opentracing~=2.2.0 thrift~=0.10 wrapt>=1.0.0,<2.0.0 markupsafe~=2.0 - -# Jaeger generated protobufs do not currently support protobuf 4.x. This can be removed once -# they're regenerated. -protobuf~=3.19 +protobuf==5.26.1 diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt b/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt deleted file mode 100644 index 27e61810db3..00000000000 --- a/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt +++ /dev/null @@ -1,19 +0,0 @@ -asgiref==3.7.2 -Deprecated==1.2.14 -importlib-metadata==6.11.0 -iniconfig==2.0.0 -packaging==24.0 -pluggy==1.5.0 -protobuf==4.25.3 -py-cpuinfo==9.0.0 -pytest==7.4.4 -tomli==2.0.1 -typing_extensions==4.10.0 -wrapt==1.16.0 -zipp==3.19.2 --e opentelemetry-api --e opentelemetry-sdk --e opentelemetry-semantic-conventions --e tests/opentelemetry-test-utils --e opentelemetry-proto --e exporter/opentelemetry-exporter-otlp-proto-common diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-0.txt b/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt similarity index 95% rename from exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-0.txt rename to 
exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt index 093b69f3fe2..6ab1dbf75b7 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-0.txt +++ b/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt @@ -4,7 +4,7 @@ importlib-metadata==6.11.0 iniconfig==2.0.0 packaging==24.0 pluggy==1.5.0 -protobuf==3.20.3 +protobuf==5.26.1 py-cpuinfo==9.0.0 pytest==7.4.4 tomli==2.0.1 diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/pyproject.toml b/exporter/opentelemetry-exporter-otlp-proto-grpc/pyproject.toml index c6c3fedc6fd..f6f48d9205e 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/pyproject.toml +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/pyproject.toml @@ -29,7 +29,7 @@ classifiers = [ dependencies = [ "Deprecated >= 1.2.6", "googleapis-common-protos ~= 1.52", - "grpcio >= 1.0.0, < 2.0.0", + "grpcio >= 1.63.2, < 2.0.0", "opentelemetry-api ~= 1.15", "opentelemetry-proto == 1.28.0.dev", "opentelemetry-sdk ~= 1.28.0.dev", diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt b/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt deleted file mode 100644 index 250b55bed86..00000000000 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt +++ /dev/null @@ -1,22 +0,0 @@ -asgiref==3.7.2 -Deprecated==1.2.14 -googleapis-common-protos==1.62.0 -grpcio==1.62.0 -importlib-metadata==6.11.0 -iniconfig==2.0.0 -packaging==24.0 -pluggy==1.5.0 -protobuf==4.25.3 -py-cpuinfo==9.0.0 -pytest==7.4.4 -tomli==2.0.1 -typing_extensions==4.10.0 -wrapt==1.16.0 -zipp==3.19.2 --e opentelemetry-api --e tests/opentelemetry-test-utils --e exporter/opentelemetry-exporter-otlp-proto-common --e opentelemetry-proto --e opentelemetry-sdk --e opentelemetry-semantic-conventions --e exporter/opentelemetry-exporter-otlp-proto-grpc diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-0.txt 
b/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt similarity index 87% rename from exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-0.txt rename to exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt index abbd8927d0f..28d778461a9 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-0.txt +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt @@ -1,12 +1,12 @@ asgiref==3.7.2 Deprecated==1.2.14 -googleapis-common-protos==1.62.0 -grpcio==1.62.0 +googleapis-common-protos==1.63.2 +grpcio==1.66.2 importlib-metadata==6.11.0 iniconfig==2.0.0 packaging==24.0 pluggy==1.5.0 -protobuf==3.20.3 +protobuf==5.26.1 py-cpuinfo==9.0.0 pytest==7.4.4 tomli==2.0.1 diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/logs/test_otlp_logs_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/logs/test_otlp_logs_exporter.py index fc2211c5aeb..5840b7360f2 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/logs/test_otlp_logs_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/logs/test_otlp_logs_exporter.py @@ -24,7 +24,9 @@ Duration, ) from google.protobuf.json_format import MessageToDict -from google.rpc.error_details_pb2 import RetryInfo +from google.rpc.error_details_pb2 import ( # pylint: disable=no-name-in-module + RetryInfo, +) from grpc import ChannelCredentials, Compression, StatusCode, server from opentelemetry._logs import SeverityNumber diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py index f7bbdabb11f..d9b02611a07 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py @@ -23,7 +23,9 @@ from google.protobuf.duration_pb2 import ( # pylint: 
disable=no-name-in-module Duration, ) -from google.rpc.error_details_pb2 import RetryInfo +from google.rpc.error_details_pb2 import ( # pylint: disable=no-name-in-module + RetryInfo, +) from grpc import Compression from opentelemetry.exporter.otlp.proto.grpc.exporter import ( diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py index f9f9427b776..d1b7ce9df59 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py @@ -29,7 +29,9 @@ from google.protobuf.duration_pb2 import ( # pylint: disable=no-name-in-module Duration, ) -from google.rpc.error_details_pb2 import RetryInfo +from google.rpc.error_details_pb2 import ( # pylint: disable=no-name-in-module + RetryInfo, +) from grpc import ChannelCredentials, Compression, StatusCode, server from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py index d618ffb13a3..7facce1f70f 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py @@ -25,7 +25,9 @@ from google.protobuf.duration_pb2 import ( # pylint: disable=no-name-in-module Duration, ) -from google.rpc.error_details_pb2 import RetryInfo +from google.rpc.error_details_pb2 import ( # pylint: disable=no-name-in-module + RetryInfo, +) from grpc import ChannelCredentials, Compression, StatusCode, server from opentelemetry.attributes import BoundedAttributes diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt 
b/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt deleted file mode 100644 index 28e23c5edf2..00000000000 --- a/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt +++ /dev/null @@ -1,28 +0,0 @@ -asgiref==3.7.2 -certifi==2024.7.4 -charset-normalizer==3.3.2 -Deprecated==1.2.14 -googleapis-common-protos==1.62.0 -idna==3.7 -importlib-metadata==6.11.0 -iniconfig==2.0.0 -packaging==24.0 -pluggy==1.5.0 -protobuf==4.25.3 -py-cpuinfo==9.0.0 -pytest==7.4.4 -PyYAML==6.0.1 -requests==2.32.3 -responses==0.24.1 -tomli==2.0.1 -typing_extensions==4.10.0 -urllib3==2.2.2 -wrapt==1.16.0 -zipp==3.19.2 --e opentelemetry-api --e tests/opentelemetry-test-utils --e exporter/opentelemetry-exporter-otlp-proto-common --e opentelemetry-proto --e opentelemetry-sdk --e opentelemetry-semantic-conventions --e exporter/opentelemetry-exporter-otlp-proto-http diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-0.txt b/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements.txt similarity index 91% rename from exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-0.txt rename to exporter/opentelemetry-exporter-otlp-proto-http/test-requirements.txt index 6d83270f10f..d601ca693bd 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-0.txt +++ b/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements.txt @@ -2,13 +2,13 @@ asgiref==3.7.2 certifi==2024.7.4 charset-normalizer==3.3.2 Deprecated==1.2.14 -googleapis-common-protos==1.62.0 +googleapis-common-protos==1.63.2 idna==3.7 importlib-metadata==6.11.0 iniconfig==2.0.0 packaging==24.0 pluggy==1.5.0 -protobuf==3.20.3 +protobuf==5.26.1 py-cpuinfo==9.0.0 pytest==7.4.4 PyYAML==6.0.1 diff --git a/exporter/opentelemetry-exporter-zipkin-proto-http/test-requirements.txt b/exporter/opentelemetry-exporter-zipkin-proto-http/test-requirements.txt index c1913e6928d..a3b296b4805 100644 --- 
a/exporter/opentelemetry-exporter-zipkin-proto-http/test-requirements.txt +++ b/exporter/opentelemetry-exporter-zipkin-proto-http/test-requirements.txt @@ -17,7 +17,6 @@ urllib3==2.2.2 wrapt==1.16.0 zipp==3.19.2 -e opentelemetry-api --e opentelemetry-proto -e exporter/opentelemetry-exporter-zipkin-json -e opentelemetry-sdk -e tests/opentelemetry-test-utils diff --git a/gen-requirements.txt b/gen-requirements.txt index 88f8817b2df..3cd7e79a440 100644 --- a/gen-requirements.txt +++ b/gen-requirements.txt @@ -1,6 +1,5 @@ -# This version of grpcio-tools ships with protoc 3.19.4 which appears to be compatible with -# both protobuf 3.19.x and 4.x (see https://github.com/protocolbuffers/protobuf/issues/11123). -# Bump this version with caution to preserve compatibility with protobuf 3. -# https://github.com/open-telemetry/opentelemetry-python/blob/main/opentelemetry-proto/pyproject.toml#L28 -grpcio-tools==1.48.1 -mypy-protobuf~=3.0.0 +# Use caution when bumping this version to ensure compatibility with the currently supported protobuf version. +# Pinning this to the oldest grpcio version that supports protobuf 5 helps avoid RuntimeWarning messages +# from the generated protobuf code and ensures continued stability for newer grpcio versions. 
+grpcio-tools==1.63.2 +mypy-protobuf~=3.5.0 diff --git a/opentelemetry-proto/pyproject.toml b/opentelemetry-proto/pyproject.toml index 505a88f4d5c..6bf212218ff 100644 --- a/opentelemetry-proto/pyproject.toml +++ b/opentelemetry-proto/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", ] dependencies = [ - "protobuf>=3.19, < 5.0", + "protobuf>=5.0, < 6.0", ] [project.urls] diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py index 5e6ae0ef926..17f7196eee6 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py @@ -1,59 +1,45 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: opentelemetry/proto/collector/logs/v1/logs_service.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 
\x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess\"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3') - - - -_EXPORTLOGSSERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportLogsServiceRequest'] -_EXPORTLOGSSERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportLogsServiceResponse'] -_EXPORTLOGSPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportLogsPartialSuccess'] -ExportLogsServiceRequest = _reflection.GeneratedProtocolMessageType('ExportLogsServiceRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTLOGSSERVICEREQUEST, - '__module__' : 'opentelemetry.proto.collector.logs.v1.logs_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest) - }) -_sym_db.RegisterMessage(ExportLogsServiceRequest) - -ExportLogsServiceResponse = _reflection.GeneratedProtocolMessageType('ExportLogsServiceResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTLOGSSERVICERESPONSE, - '__module__' : 'opentelemetry.proto.collector.logs.v1.logs_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse) - }) -_sym_db.RegisterMessage(ExportLogsServiceResponse) - -ExportLogsPartialSuccess = _reflection.GeneratedProtocolMessageType('ExportLogsPartialSuccess', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTLOGSPARTIALSUCCESS, - '__module__' : 'opentelemetry.proto.collector.logs.v1.logs_service_pb2' - # 
@@protoc_insertion_point(class_scope:opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess) - }) -_sym_db.RegisterMessage(ExportLogsPartialSuccess) - -_LOGSSERVICE = DESCRIPTOR.services_by_name['LogsService'] -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1' - _EXPORTLOGSSERVICEREQUEST._serialized_start=139 - _EXPORTLOGSSERVICEREQUEST._serialized_end=231 - _EXPORTLOGSSERVICERESPONSE._serialized_start=233 - _EXPORTLOGSSERVICERESPONSE._serialized_end=350 - _EXPORTLOGSPARTIALSUCCESS._serialized_start=352 - _EXPORTLOGSPARTIALSUCCESS._serialized_end=431 - _LOGSSERVICE._serialized_start=434 - _LOGSSERVICE._serialized_end=591 +from opentelemetry.proto.logs.v1 import ( + logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3' +) + +_globals = 
globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.proto.collector.logs.v1.logs_service_pb2", + _globals, +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1" + ) + _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_start = 139 + _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_end = 231 + _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_start = 233 + _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_end = 350 + _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_start = 352 + _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_end = 431 + _globals["_LOGSSERVICE"]._serialized_start = 434 + _globals["_LOGSSERVICE"]._serialized_end = 591 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi index cdf57e9fa1c..e07c1184ebf 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi @@ -1,38 +1,77 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2020, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message import opentelemetry.proto.logs.v1.logs_pb2 -import typing -import typing_extensions +import sys -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final class ExportLogsServiceRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_LOGS_FIELD_NUMBER: builtins.int @property - def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]: + def resource_logs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs + ]: """An array of ResourceLogs. For data coming from a single resource this array will typically contain one element. Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - pass - def __init__(self, + + def __init__( + self, *, - resource_logs : typing.Optional[typing.Iterable[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["resource_logs",b"resource_logs"]) -> None: ... + resource_logs: ( + collections.abc.Iterable[ + opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs + ] + | None + ) = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_logs", b"resource_logs" + ], + ) -> None: ... + global___ExportLogsServiceRequest = ExportLogsServiceRequest +@typing_extensions.final class ExportLogsServiceResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int @property def partial_success(self) -> global___ExportLogsPartialSuccess: @@ -52,27 +91,40 @@ class ExportLogsServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. """ - pass - def __init__(self, + + def __init__( + self, *, - partial_success : typing.Optional[global___ExportLogsPartialSuccess] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... + partial_success: global___ExportLogsPartialSuccess | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> None: ... + global___ExportLogsServiceResponse = ExportLogsServiceResponse +@typing_extensions.final class ExportLogsPartialSuccess(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + REJECTED_LOG_RECORDS_FIELD_NUMBER: builtins.int ERROR_MESSAGE_FIELD_NUMBER: builtins.int - rejected_log_records: builtins.int = ... + rejected_log_records: builtins.int """The number of rejected log records. 
A `rejected_` field holding a `0` value indicates that the request was fully accepted. """ - - error_message: typing.Text = ... + error_message: builtins.str """A developer-facing human-readable message in English. It should be used either to explain why the server rejected parts of the data during a partial success or to convey warnings/suggestions during a full success. The message @@ -81,11 +133,20 @@ class ExportLogsPartialSuccess(google.protobuf.message.Message): error_message is an optional field. An error_message with an empty value is equivalent to it not being set. """ - - def __init__(self, + def __init__( + self, *, - rejected_log_records : builtins.int = ..., - error_message : typing.Text = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_log_records",b"rejected_log_records"]) -> None: ... + rejected_log_records: builtins.int = ..., + error_message: builtins.str = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "error_message", + b"error_message", + "rejected_log_records", + b"rejected_log_records", + ], + ) -> None: ... + global___ExportLogsPartialSuccess = ExportLogsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py index 4d55e577786..99c720386b5 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py @@ -1,8 +1,38 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" import grpc +import warnings -from opentelemetry.proto.collector.logs.v1 import logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2 +from opentelemetry.proto.collector.logs.v1 import ( + logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.63.2" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) class LogsServiceStub(object): @@ -18,10 +48,11 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Export = channel.unary_unary( - '/opentelemetry.proto.collector.logs.v1.LogsService/Export', - request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, - ) + "/opentelemetry.proto.collector.logs.v1.LogsService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, + _registered_method=True, + ) class LogsServiceServicer(object): @@ -35,24 +66,26 @@ def Export(self, request, context): alive for the entire life of the application. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_LogsServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'Export': grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString, - ), + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString, + ), } generic_handler = 
grpc.method_handlers_generic_handler( - 'opentelemetry.proto.collector.logs.v1.LogsService', rpc_method_handlers) + "opentelemetry.proto.collector.logs.v1.LogsService", + rpc_method_handlers, + ) server.add_generic_rpc_handlers((generic_handler,)) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. class LogsService(object): """Service that can be used to push logs between one Application instrumented with OpenTelemetry and an collector, or between an collector and a central collector (in this @@ -60,18 +93,31 @@ class LogsService(object): """ @staticmethod - def Export(request, + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/opentelemetry.proto.collector.logs.v1.LogsService/Export', + "/opentelemetry.proto.collector.logs.v1.LogsService/Export", opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py index 1d9021d7029..f25b8801525 100644 --- 
a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py @@ -1,59 +1,45 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: opentelemetry/proto/collector/metrics/v1/metrics_service.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess\"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 
\x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3') - - - -_EXPORTMETRICSSERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportMetricsServiceRequest'] -_EXPORTMETRICSSERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportMetricsServiceResponse'] -_EXPORTMETRICSPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportMetricsPartialSuccess'] -ExportMetricsServiceRequest = _reflection.GeneratedProtocolMessageType('ExportMetricsServiceRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTMETRICSSERVICEREQUEST, - '__module__' : 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest) - }) -_sym_db.RegisterMessage(ExportMetricsServiceRequest) - -ExportMetricsServiceResponse = _reflection.GeneratedProtocolMessageType('ExportMetricsServiceResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTMETRICSSERVICERESPONSE, - '__module__' : 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse) - }) -_sym_db.RegisterMessage(ExportMetricsServiceResponse) - -ExportMetricsPartialSuccess = _reflection.GeneratedProtocolMessageType('ExportMetricsPartialSuccess', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTMETRICSPARTIALSUCCESS, - '__module__' : 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess) - }) 
-_sym_db.RegisterMessage(ExportMetricsPartialSuccess) - -_METRICSSERVICE = DESCRIPTOR.services_by_name['MetricsService'] -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1' - _EXPORTMETRICSSERVICEREQUEST._serialized_start=154 - _EXPORTMETRICSSERVICEREQUEST._serialized_end=258 - _EXPORTMETRICSSERVICERESPONSE._serialized_start=260 - _EXPORTMETRICSSERVICERESPONSE._serialized_end=386 - _EXPORTMETRICSPARTIALSUCCESS._serialized_start=388 - _EXPORTMETRICSPARTIALSUCCESS._serialized_end=470 - _METRICSSERVICE._serialized_start=473 - _METRICSSERVICE._serialized_end=645 +from opentelemetry.proto.metrics.v1 import ( + metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 
\x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.proto.collector.metrics.v1.metrics_service_pb2", + _globals, +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1" + ) + _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_start = 154 + _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_end = 258 + _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_start = 260 + _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_end = 386 + _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_start = 388 + _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_end = 470 + _globals["_METRICSSERVICE"]._serialized_start = 473 + _globals["_METRICSSERVICE"]._serialized_end = 645 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi index ffd750bdf28..d38595d08d1 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi @@ -1,38 +1,77 @@ """ @generated by mypy-protobuf. 
Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message import opentelemetry.proto.metrics.v1.metrics_pb2 -import typing -import typing_extensions +import sys -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final class ExportMetricsServiceRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_METRICS_FIELD_NUMBER: builtins.int @property - def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]: + def resource_metrics( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics + ]: """An array of ResourceMetrics. For data coming from a single resource this array will typically contain one element. 
Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - pass - def __init__(self, + + def __init__( + self, *, - resource_metrics : typing.Optional[typing.Iterable[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["resource_metrics",b"resource_metrics"]) -> None: ... + resource_metrics: ( + collections.abc.Iterable[ + opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics + ] + | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_metrics", b"resource_metrics" + ], + ) -> None: ... + global___ExportMetricsServiceRequest = ExportMetricsServiceRequest +@typing_extensions.final class ExportMetricsServiceResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int @property def partial_success(self) -> global___ExportMetricsPartialSuccess: @@ -52,27 +91,40 @@ class ExportMetricsServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. """ - pass - def __init__(self, + + def __init__( + self, *, - partial_success : typing.Optional[global___ExportMetricsPartialSuccess] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... + partial_success: global___ExportMetricsPartialSuccess | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> None: ... + global___ExportMetricsServiceResponse = ExportMetricsServiceResponse +@typing_extensions.final class ExportMetricsPartialSuccess(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + REJECTED_DATA_POINTS_FIELD_NUMBER: builtins.int ERROR_MESSAGE_FIELD_NUMBER: builtins.int - rejected_data_points: builtins.int = ... + rejected_data_points: builtins.int """The number of rejected data points. A `rejected_` field holding a `0` value indicates that the request was fully accepted. """ - - error_message: typing.Text = ... + error_message: builtins.str """A developer-facing human-readable message in English. It should be used either to explain why the server rejected parts of the data during a partial success or to convey warnings/suggestions during a full success. The message @@ -81,11 +133,20 @@ class ExportMetricsPartialSuccess(google.protobuf.message.Message): error_message is an optional field. An error_message with an empty value is equivalent to it not being set. """ - - def __init__(self, + def __init__( + self, *, - rejected_data_points : builtins.int = ..., - error_message : typing.Text = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_data_points",b"rejected_data_points"]) -> None: ... + rejected_data_points: builtins.int = ..., + error_message: builtins.str = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "error_message", + b"error_message", + "rejected_data_points", + b"rejected_data_points", + ], + ) -> None: ... 
+ global___ExportMetricsPartialSuccess = ExportMetricsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py index c181c44641d..fcf72343f8b 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py @@ -1,8 +1,38 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc +import warnings -from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2 +from opentelemetry.proto.collector.metrics.v1 import ( + metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.63.2" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." 
+ + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) class MetricsServiceStub(object): @@ -18,10 +48,11 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.Export = channel.unary_unary( - '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export', - request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, - ) + "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, + _registered_method=True, + ) class MetricsServiceServicer(object): @@ -35,24 +66,26 @@ def Export(self, request, context): alive for the entire life of the application. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_MetricsServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'Export': grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString, - ), + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'opentelemetry.proto.collector.metrics.v1.MetricsService', rpc_method_handlers) + "opentelemetry.proto.collector.metrics.v1.MetricsService", + rpc_method_handlers, + ) server.add_generic_rpc_handlers((generic_handler,)) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. 
class MetricsService(object): """Service that can be used to push metrics between one Application instrumented with OpenTelemetry and a collector, or between a collector and a @@ -60,18 +93,31 @@ class MetricsService(object): """ @staticmethod - def Export(request, + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export', + "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py index fff65da1b70..8ced74fb4c2 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py @@ -1,59 +1,45 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: opentelemetry/proto/collector/trace/v1/trace_service.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.trace.v1 import trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess\"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3') - - - -_EXPORTTRACESERVICEREQUEST = DESCRIPTOR.message_types_by_name['ExportTraceServiceRequest'] -_EXPORTTRACESERVICERESPONSE = DESCRIPTOR.message_types_by_name['ExportTraceServiceResponse'] -_EXPORTTRACEPARTIALSUCCESS = DESCRIPTOR.message_types_by_name['ExportTracePartialSuccess'] 
-ExportTraceServiceRequest = _reflection.GeneratedProtocolMessageType('ExportTraceServiceRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTTRACESERVICEREQUEST, - '__module__' : 'opentelemetry.proto.collector.trace.v1.trace_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest) - }) -_sym_db.RegisterMessage(ExportTraceServiceRequest) - -ExportTraceServiceResponse = _reflection.GeneratedProtocolMessageType('ExportTraceServiceResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTTRACESERVICERESPONSE, - '__module__' : 'opentelemetry.proto.collector.trace.v1.trace_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse) - }) -_sym_db.RegisterMessage(ExportTraceServiceResponse) - -ExportTracePartialSuccess = _reflection.GeneratedProtocolMessageType('ExportTracePartialSuccess', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTTRACEPARTIALSUCCESS, - '__module__' : 'opentelemetry.proto.collector.trace.v1.trace_service_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess) - }) -_sym_db.RegisterMessage(ExportTracePartialSuccess) - -_TRACESERVICE = DESCRIPTOR.services_by_name['TraceService'] -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1' - _EXPORTTRACESERVICEREQUEST._serialized_start=144 - _EXPORTTRACESERVICEREQUEST._serialized_end=240 - _EXPORTTRACESERVICERESPONSE._serialized_start=242 - _EXPORTTRACESERVICERESPONSE._serialized_end=362 - _EXPORTTRACEPARTIALSUCCESS._serialized_start=364 - _EXPORTTRACEPARTIALSUCCESS._serialized_end=438 - _TRACESERVICE._serialized_start=441 - _TRACESERVICE._serialized_end=603 +from opentelemetry.proto.trace.v1 import ( + 
trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "opentelemetry.proto.collector.trace.v1.trace_service_pb2", + _globals, +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1" + ) + _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_start = 144 + _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_end = 240 + _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_start = 242 + _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_end = 362 + _globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_start = 364 + 
_globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_end = 438 + _globals["_TRACESERVICE"]._serialized_start = 441 + _globals["_TRACESERVICE"]._serialized_end = 603 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi index 4e2d064ee79..263fc690e45 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi @@ -1,38 +1,77 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message import opentelemetry.proto.trace.v1.trace_pb2 -import typing -import typing_extensions +import sys -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final class ExportTraceServiceRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_SPANS_FIELD_NUMBER: builtins.int @property - def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]: + def resource_spans( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans + ]: """An array of ResourceSpans. For data coming from a single resource this array will typically contain one element. Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - pass - def __init__(self, + + def __init__( + self, *, - resource_spans : typing.Optional[typing.Iterable[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["resource_spans",b"resource_spans"]) -> None: ... + resource_spans: ( + collections.abc.Iterable[ + opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans + ] + | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_spans", b"resource_spans" + ], + ) -> None: ... + global___ExportTraceServiceRequest = ExportTraceServiceRequest +@typing_extensions.final class ExportTraceServiceResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int @property def partial_success(self) -> global___ExportTracePartialSuccess: @@ -52,27 +91,40 @@ class ExportTraceServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. 
""" - pass - def __init__(self, + + def __init__( + self, *, - partial_success : typing.Optional[global___ExportTracePartialSuccess] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["partial_success",b"partial_success"]) -> None: ... + partial_success: global___ExportTracePartialSuccess | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "partial_success", b"partial_success" + ], + ) -> None: ... + global___ExportTraceServiceResponse = ExportTraceServiceResponse +@typing_extensions.final class ExportTracePartialSuccess(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + REJECTED_SPANS_FIELD_NUMBER: builtins.int ERROR_MESSAGE_FIELD_NUMBER: builtins.int - rejected_spans: builtins.int = ... + rejected_spans: builtins.int """The number of rejected spans. A `rejected_` field holding a `0` value indicates that the request was fully accepted. """ - - error_message: typing.Text = ... + error_message: builtins.str """A developer-facing human-readable message in English. It should be used either to explain why the server rejected parts of the data during a partial success or to convey warnings/suggestions during a full success. The message @@ -81,11 +133,20 @@ class ExportTracePartialSuccess(google.protobuf.message.Message): error_message is an optional field. An error_message with an empty value is equivalent to it not being set. """ - - def __init__(self, + def __init__( + self, *, - rejected_spans : builtins.int = ..., - error_message : typing.Text = ..., - ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal["error_message",b"error_message","rejected_spans",b"rejected_spans"]) -> None: ... + rejected_spans: builtins.int = ..., + error_message: builtins.str = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "error_message", + b"error_message", + "rejected_spans", + b"rejected_spans", + ], + ) -> None: ... + global___ExportTracePartialSuccess = ExportTracePartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py index 81dbbe59f3b..f58f7ef6408 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py @@ -1,8 +1,38 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc +import warnings -from opentelemetry.proto.collector.trace.v1 import trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2 +from opentelemetry.proto.collector.trace.v1 import ( + trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.63.2" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py depends 
on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) class TraceServiceStub(object): @@ -18,10 +48,11 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.Export = channel.unary_unary( - '/opentelemetry.proto.collector.trace.v1.TraceService/Export', - request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, - ) + "/opentelemetry.proto.collector.trace.v1.TraceService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, + _registered_method=True, + ) class TraceServiceServicer(object): @@ -35,24 +66,26 @@ def Export(self, request, context): alive for the entire life of the application. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_TraceServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'Export': grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString, - ), + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'opentelemetry.proto.collector.trace.v1.TraceService', rpc_method_handlers) + "opentelemetry.proto.collector.trace.v1.TraceService", + rpc_method_handlers, + ) server.add_generic_rpc_handlers((generic_handler,)) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. 
class TraceService(object): """Service that can be used to push spans between one Application instrumented with OpenTelemetry and a collector, or between a collector and a central collector (in this @@ -60,18 +93,31 @@ class TraceService(object): """ @staticmethod - def Export(request, + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/opentelemetry.proto.collector.trace.v1.TraceService/Export', + "/opentelemetry.proto.collector.trace.v1.TraceService/Export", opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py index bec37ab2306..f55ae932065 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py @@ -1,75 +1,40 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: opentelemetry/proto/common/v1/common.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\rB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3') - - - -_ANYVALUE = DESCRIPTOR.message_types_by_name['AnyValue'] -_ARRAYVALUE = 
DESCRIPTOR.message_types_by_name['ArrayValue'] -_KEYVALUELIST = DESCRIPTOR.message_types_by_name['KeyValueList'] -_KEYVALUE = DESCRIPTOR.message_types_by_name['KeyValue'] -_INSTRUMENTATIONSCOPE = DESCRIPTOR.message_types_by_name['InstrumentationScope'] -AnyValue = _reflection.GeneratedProtocolMessageType('AnyValue', (_message.Message,), { - 'DESCRIPTOR' : _ANYVALUE, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.AnyValue) - }) -_sym_db.RegisterMessage(AnyValue) - -ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), { - 'DESCRIPTOR' : _ARRAYVALUE, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.ArrayValue) - }) -_sym_db.RegisterMessage(ArrayValue) - -KeyValueList = _reflection.GeneratedProtocolMessageType('KeyValueList', (_message.Message,), { - 'DESCRIPTOR' : _KEYVALUELIST, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.KeyValueList) - }) -_sym_db.RegisterMessage(KeyValueList) - -KeyValue = _reflection.GeneratedProtocolMessageType('KeyValue', (_message.Message,), { - 'DESCRIPTOR' : _KEYVALUE, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.KeyValue) - }) -_sym_db.RegisterMessage(KeyValue) - -InstrumentationScope = _reflection.GeneratedProtocolMessageType('InstrumentationScope', (_message.Message,), { - 'DESCRIPTOR' : _INSTRUMENTATIONSCOPE, - '__module__' : 'opentelemetry.proto.common.v1.common_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.common.v1.InstrumentationScope) - }) -_sym_db.RegisterMessage(InstrumentationScope) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n 
io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1' - _ANYVALUE._serialized_start=78 - _ANYVALUE._serialized_end=346 - _ARRAYVALUE._serialized_start=348 - _ARRAYVALUE._serialized_end=417 - _KEYVALUELIST._serialized_start=419 - _KEYVALUELIST._serialized_end=490 - _KEYVALUE._serialized_start=492 - _KEYVALUE._serialized_end=571 - _INSTRUMENTATIONSCOPE._serialized_start=574 - _INSTRUMENTATIONSCOPE._serialized_end=722 +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\rB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + 
DESCRIPTOR, "opentelemetry.proto.common.v1.common_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1" + ) + _globals["_ANYVALUE"]._serialized_start = 78 + _globals["_ANYVALUE"]._serialized_end = 346 + _globals["_ARRAYVALUE"]._serialized_start = 348 + _globals["_ARRAYVALUE"]._serialized_end = 417 + _globals["_KEYVALUELIST"]._serialized_start = 419 + _globals["_KEYVALUELIST"]._serialized_end = 490 + _globals["_KEYVALUE"]._serialized_start = 492 + _globals["_KEYVALUE"]._serialized_end = 571 + _globals["_INSTRUMENTATIONSCOPE"]._serialized_start = 574 + _globals["_INSTRUMENTATIONSCOPE"]._serialized_end = 722 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi index 304feec5abb..4d948c37938 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi @@ -1,22 +1,44 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
""" + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message -import typing -import typing_extensions +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor +@typing_extensions.final class AnyValue(google.protobuf.message.Message): """AnyValue is used to represent any type of attribute value. AnyValue may contain a primitive value such as a string or integer or it may contain an arbitrary nested object containing arrays, key-value lists and primitives. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + STRING_VALUE_FIELD_NUMBER: builtins.int BOOL_VALUE_FIELD_NUMBER: builtins.int INT_VALUE_FIELD_NUMBER: builtins.int @@ -24,47 +46,114 @@ class AnyValue(google.protobuf.message.Message): ARRAY_VALUE_FIELD_NUMBER: builtins.int KVLIST_VALUE_FIELD_NUMBER: builtins.int BYTES_VALUE_FIELD_NUMBER: builtins.int - string_value: typing.Text = ... - bool_value: builtins.bool = ... - int_value: builtins.int = ... - double_value: builtins.float = ... + string_value: builtins.str + bool_value: builtins.bool + int_value: builtins.int + double_value: builtins.float @property def array_value(self) -> global___ArrayValue: ... @property def kvlist_value(self) -> global___KeyValueList: ... - bytes_value: builtins.bytes = ... - def __init__(self, + bytes_value: builtins.bytes + def __init__( + self, *, - string_value : typing.Text = ..., - bool_value : builtins.bool = ..., - int_value : builtins.int = ..., - double_value : builtins.float = ..., - array_value : typing.Optional[global___ArrayValue] = ..., - kvlist_value : typing.Optional[global___KeyValueList] = ..., - bytes_value : builtins.bytes = ..., - ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["array_value",b"array_value","bool_value",b"bool_value","bytes_value",b"bytes_value","double_value",b"double_value","int_value",b"int_value","kvlist_value",b"kvlist_value","string_value",b"string_value","value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["array_value",b"array_value","bool_value",b"bool_value","bytes_value",b"bytes_value","double_value",b"double_value","int_value",b"int_value","kvlist_value",b"kvlist_value","string_value",b"string_value","value",b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["string_value","bool_value","int_value","double_value","array_value","kvlist_value","bytes_value"]]: ... + string_value: builtins.str = ..., + bool_value: builtins.bool = ..., + int_value: builtins.int = ..., + double_value: builtins.float = ..., + array_value: global___ArrayValue | None = ..., + kvlist_value: global___KeyValueList | None = ..., + bytes_value: builtins.bytes = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "array_value", + b"array_value", + "bool_value", + b"bool_value", + "bytes_value", + b"bytes_value", + "double_value", + b"double_value", + "int_value", + b"int_value", + "kvlist_value", + b"kvlist_value", + "string_value", + b"string_value", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "array_value", + b"array_value", + "bool_value", + b"bool_value", + "bytes_value", + b"bytes_value", + "double_value", + b"double_value", + "int_value", + b"int_value", + "kvlist_value", + b"kvlist_value", + "string_value", + b"string_value", + "value", + b"value", + ], + ) -> None: ... 
+ def WhichOneof( + self, oneof_group: typing_extensions.Literal["value", b"value"] + ) -> ( + typing_extensions.Literal[ + "string_value", + "bool_value", + "int_value", + "double_value", + "array_value", + "kvlist_value", + "bytes_value", + ] + | None + ): ... + global___AnyValue = AnyValue +@typing_extensions.final class ArrayValue(google.protobuf.message.Message): """ArrayValue is a list of AnyValue messages. We need ArrayValue as a message since oneof in AnyValue does not allow repeated fields. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyValue]: + def values( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___AnyValue + ]: """Array of values. The array may be empty (contain 0 elements).""" - pass - def __init__(self, + + def __init__( + self, *, - values : typing.Optional[typing.Iterable[global___AnyValue]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... + values: collections.abc.Iterable[global___AnyValue] | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["values", b"values"] + ) -> None: ... + global___ArrayValue = ArrayValue +@typing_extensions.final class KeyValueList(google.protobuf.message.Message): """KeyValueList is a list of KeyValue messages. We need KeyValueList as a message since `oneof` in AnyValue does not allow repeated fields. Everywhere else where we need @@ -72,69 +161,110 @@ class KeyValueList(google.protobuf.message.Message): avoid unnecessary extra wrapping (which slows down the protocol). The 2 approaches are semantically equivalent. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: + def values( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___KeyValue + ]: """A collection of key/value pairs of key-value pairs. The list may be empty (may contain 0 elements). The keys MUST be unique (it is not allowed to have more than one value with the same key). """ - pass - def __init__(self, + + def __init__( + self, *, - values : typing.Optional[typing.Iterable[global___KeyValue]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... + values: collections.abc.Iterable[global___KeyValue] | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["values", b"values"] + ) -> None: ... + global___KeyValueList = KeyValueList +@typing_extensions.final class KeyValue(google.protobuf.message.Message): """KeyValue is a key-value pair that is used to store Span attributes, Link attributes, etc. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int - key: typing.Text = ... + key: builtins.str @property def value(self) -> global___AnyValue: ... - def __init__(self, + def __init__( + self, *, - key : typing.Text = ..., - value : typing.Optional[global___AnyValue] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + key: builtins.str = ..., + value: global___AnyValue | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["value", b"value"] + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "key", b"key", "value", b"value" + ], + ) -> None: ... + global___KeyValue = KeyValue +@typing_extensions.final class InstrumentationScope(google.protobuf.message.Message): """InstrumentationScope is a message representing the instrumentation scope information such as the fully qualified name and version. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + NAME_FIELD_NUMBER: builtins.int VERSION_FIELD_NUMBER: builtins.int ATTRIBUTES_FIELD_NUMBER: builtins.int DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: builtins.str """An empty instrumentation scope name means the name is unknown.""" - - version: typing.Text = ... + version: builtins.str @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___KeyValue + ]: """Additional attributes that describe the scope. [Optional]. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... - def __init__(self, + dropped_attributes_count: builtins.int + def __init__( + self, *, - name : typing.Text = ..., - version : typing.Text = ..., - attributes : typing.Optional[typing.Iterable[global___KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","name",b"name","version",b"version"]) -> None: ... + name: builtins.str = ..., + version: builtins.str = ..., + attributes: collections.abc.Iterable[global___KeyValue] | None = ..., + dropped_attributes_count: builtins.int = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "dropped_attributes_count", + b"dropped_attributes_count", + "name", + b"name", + "version", + b"version", + ], + ) -> None: ... + global___InstrumentationScope = InstrumentationScope diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py index 90b71871556..715a741a2c6 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py @@ -1,103 +1,50 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: opentelemetry/proto/logs/v1/logs.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 -from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 
\x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n 
\x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3') - -_SEVERITYNUMBER = DESCRIPTOR.enum_types_by_name['SeverityNumber'] -SeverityNumber = enum_type_wrapper.EnumTypeWrapper(_SEVERITYNUMBER) -_LOGRECORDFLAGS = DESCRIPTOR.enum_types_by_name['LogRecordFlags'] -LogRecordFlags = enum_type_wrapper.EnumTypeWrapper(_LOGRECORDFLAGS) -SEVERITY_NUMBER_UNSPECIFIED = 0 -SEVERITY_NUMBER_TRACE = 1 -SEVERITY_NUMBER_TRACE2 = 2 -SEVERITY_NUMBER_TRACE3 = 3 -SEVERITY_NUMBER_TRACE4 = 4 -SEVERITY_NUMBER_DEBUG = 5 -SEVERITY_NUMBER_DEBUG2 = 6 -SEVERITY_NUMBER_DEBUG3 = 7 -SEVERITY_NUMBER_DEBUG4 = 8 -SEVERITY_NUMBER_INFO = 9 -SEVERITY_NUMBER_INFO2 = 
10 -SEVERITY_NUMBER_INFO3 = 11 -SEVERITY_NUMBER_INFO4 = 12 -SEVERITY_NUMBER_WARN = 13 -SEVERITY_NUMBER_WARN2 = 14 -SEVERITY_NUMBER_WARN3 = 15 -SEVERITY_NUMBER_WARN4 = 16 -SEVERITY_NUMBER_ERROR = 17 -SEVERITY_NUMBER_ERROR2 = 18 -SEVERITY_NUMBER_ERROR3 = 19 -SEVERITY_NUMBER_ERROR4 = 20 -SEVERITY_NUMBER_FATAL = 21 -SEVERITY_NUMBER_FATAL2 = 22 -SEVERITY_NUMBER_FATAL3 = 23 -SEVERITY_NUMBER_FATAL4 = 24 -LOG_RECORD_FLAGS_DO_NOT_USE = 0 -LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = 255 - - -_LOGSDATA = DESCRIPTOR.message_types_by_name['LogsData'] -_RESOURCELOGS = DESCRIPTOR.message_types_by_name['ResourceLogs'] -_SCOPELOGS = DESCRIPTOR.message_types_by_name['ScopeLogs'] -_LOGRECORD = DESCRIPTOR.message_types_by_name['LogRecord'] -LogsData = _reflection.GeneratedProtocolMessageType('LogsData', (_message.Message,), { - 'DESCRIPTOR' : _LOGSDATA, - '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.logs.v1.LogsData) - }) -_sym_db.RegisterMessage(LogsData) - -ResourceLogs = _reflection.GeneratedProtocolMessageType('ResourceLogs', (_message.Message,), { - 'DESCRIPTOR' : _RESOURCELOGS, - '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.logs.v1.ResourceLogs) - }) -_sym_db.RegisterMessage(ResourceLogs) - -ScopeLogs = _reflection.GeneratedProtocolMessageType('ScopeLogs', (_message.Message,), { - 'DESCRIPTOR' : _SCOPELOGS, - '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.logs.v1.ScopeLogs) - }) -_sym_db.RegisterMessage(ScopeLogs) - -LogRecord = _reflection.GeneratedProtocolMessageType('LogRecord', (_message.Message,), { - 'DESCRIPTOR' : _LOGRECORD, - '__module__' : 'opentelemetry.proto.logs.v1.logs_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.logs.v1.LogRecord) - }) -_sym_db.RegisterMessage(LogRecord) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None 
- DESCRIPTOR._serialized_options = b'\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1' - _SEVERITYNUMBER._serialized_start=941 - _SEVERITYNUMBER._serialized_end=1648 - _LOGRECORDFLAGS._serialized_start=1650 - _LOGRECORDFLAGS._serialized_end=1739 - _LOGSDATA._serialized_start=163 - _LOGSDATA._serialized_end=239 - _RESOURCELOGS._serialized_start=242 - _RESOURCELOGS._serialized_end=405 - _SCOPELOGS._serialized_start=408 - _SCOPELOGS._serialized_end=568 - _LOGRECORD._serialized_start=571 - _LOGRECORD._serialized_end=938 +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 
\x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.logs.v1.logs_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + 
_globals["DESCRIPTOR"]._serialized_options = ( + b"\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1" + ) + _globals["_SEVERITYNUMBER"]._serialized_start = 941 + _globals["_SEVERITYNUMBER"]._serialized_end = 1648 + _globals["_LOGRECORDFLAGS"]._serialized_start = 1650 + _globals["_LOGRECORDFLAGS"]._serialized_end = 1739 + _globals["_LOGSDATA"]._serialized_start = 163 + _globals["_LOGSDATA"]._serialized_end = 239 + _globals["_RESOURCELOGS"]._serialized_start = 242 + _globals["_RESOURCELOGS"]._serialized_end = 405 + _globals["_SCOPELOGS"]._serialized_start = 408 + _globals["_SCOPELOGS"]._serialized_end = 568 + _globals["_LOGRECORD"]._serialized_start = 571 + _globals["_LOGRECORD"]._serialized_end = 938 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi index 5aee721a804..b0232c2b6b3 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi @@ -1,85 +1,131 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2020, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
""" + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.internal.enum_type_wrapper import google.protobuf.message import opentelemetry.proto.common.v1.common_pb2 import opentelemetry.proto.resource.v1.resource_pb2 +import sys import typing -import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -class SeverityNumber(_SeverityNumber, metaclass=_SeverityNumberEnumTypeWrapper): - """Possible values for LogRecord.SeverityNumber.""" - pass class _SeverityNumber: - V = typing.NewType('V', builtins.int) -class _SeverityNumberEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SeverityNumber.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - SEVERITY_NUMBER_UNSPECIFIED = SeverityNumber.V(0) + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _SeverityNumberEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _SeverityNumber.ValueType + ], + builtins.type, +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SEVERITY_NUMBER_UNSPECIFIED: _SeverityNumber.ValueType # 0 """UNSPECIFIED is the default SeverityNumber, it MUST NOT be used.""" + SEVERITY_NUMBER_TRACE: _SeverityNumber.ValueType # 1 + SEVERITY_NUMBER_TRACE2: _SeverityNumber.ValueType # 2 + SEVERITY_NUMBER_TRACE3: _SeverityNumber.ValueType # 3 + SEVERITY_NUMBER_TRACE4: _SeverityNumber.ValueType # 4 + SEVERITY_NUMBER_DEBUG: _SeverityNumber.ValueType # 5 + SEVERITY_NUMBER_DEBUG2: _SeverityNumber.ValueType # 6 + SEVERITY_NUMBER_DEBUG3: _SeverityNumber.ValueType # 7 + SEVERITY_NUMBER_DEBUG4: _SeverityNumber.ValueType # 8 + SEVERITY_NUMBER_INFO: _SeverityNumber.ValueType # 9 + 
SEVERITY_NUMBER_INFO2: _SeverityNumber.ValueType # 10 + SEVERITY_NUMBER_INFO3: _SeverityNumber.ValueType # 11 + SEVERITY_NUMBER_INFO4: _SeverityNumber.ValueType # 12 + SEVERITY_NUMBER_WARN: _SeverityNumber.ValueType # 13 + SEVERITY_NUMBER_WARN2: _SeverityNumber.ValueType # 14 + SEVERITY_NUMBER_WARN3: _SeverityNumber.ValueType # 15 + SEVERITY_NUMBER_WARN4: _SeverityNumber.ValueType # 16 + SEVERITY_NUMBER_ERROR: _SeverityNumber.ValueType # 17 + SEVERITY_NUMBER_ERROR2: _SeverityNumber.ValueType # 18 + SEVERITY_NUMBER_ERROR3: _SeverityNumber.ValueType # 19 + SEVERITY_NUMBER_ERROR4: _SeverityNumber.ValueType # 20 + SEVERITY_NUMBER_FATAL: _SeverityNumber.ValueType # 21 + SEVERITY_NUMBER_FATAL2: _SeverityNumber.ValueType # 22 + SEVERITY_NUMBER_FATAL3: _SeverityNumber.ValueType # 23 + SEVERITY_NUMBER_FATAL4: _SeverityNumber.ValueType # 24 + +class SeverityNumber( + _SeverityNumber, metaclass=_SeverityNumberEnumTypeWrapper +): + """Possible values for LogRecord.SeverityNumber.""" - SEVERITY_NUMBER_TRACE = SeverityNumber.V(1) - SEVERITY_NUMBER_TRACE2 = SeverityNumber.V(2) - SEVERITY_NUMBER_TRACE3 = SeverityNumber.V(3) - SEVERITY_NUMBER_TRACE4 = SeverityNumber.V(4) - SEVERITY_NUMBER_DEBUG = SeverityNumber.V(5) - SEVERITY_NUMBER_DEBUG2 = SeverityNumber.V(6) - SEVERITY_NUMBER_DEBUG3 = SeverityNumber.V(7) - SEVERITY_NUMBER_DEBUG4 = SeverityNumber.V(8) - SEVERITY_NUMBER_INFO = SeverityNumber.V(9) - SEVERITY_NUMBER_INFO2 = SeverityNumber.V(10) - SEVERITY_NUMBER_INFO3 = SeverityNumber.V(11) - SEVERITY_NUMBER_INFO4 = SeverityNumber.V(12) - SEVERITY_NUMBER_WARN = SeverityNumber.V(13) - SEVERITY_NUMBER_WARN2 = SeverityNumber.V(14) - SEVERITY_NUMBER_WARN3 = SeverityNumber.V(15) - SEVERITY_NUMBER_WARN4 = SeverityNumber.V(16) - SEVERITY_NUMBER_ERROR = SeverityNumber.V(17) - SEVERITY_NUMBER_ERROR2 = SeverityNumber.V(18) - SEVERITY_NUMBER_ERROR3 = SeverityNumber.V(19) - SEVERITY_NUMBER_ERROR4 = SeverityNumber.V(20) - SEVERITY_NUMBER_FATAL = SeverityNumber.V(21) - SEVERITY_NUMBER_FATAL2 = 
SeverityNumber.V(22) - SEVERITY_NUMBER_FATAL3 = SeverityNumber.V(23) - SEVERITY_NUMBER_FATAL4 = SeverityNumber.V(24) - -SEVERITY_NUMBER_UNSPECIFIED = SeverityNumber.V(0) +SEVERITY_NUMBER_UNSPECIFIED: SeverityNumber.ValueType # 0 """UNSPECIFIED is the default SeverityNumber, it MUST NOT be used.""" - -SEVERITY_NUMBER_TRACE = SeverityNumber.V(1) -SEVERITY_NUMBER_TRACE2 = SeverityNumber.V(2) -SEVERITY_NUMBER_TRACE3 = SeverityNumber.V(3) -SEVERITY_NUMBER_TRACE4 = SeverityNumber.V(4) -SEVERITY_NUMBER_DEBUG = SeverityNumber.V(5) -SEVERITY_NUMBER_DEBUG2 = SeverityNumber.V(6) -SEVERITY_NUMBER_DEBUG3 = SeverityNumber.V(7) -SEVERITY_NUMBER_DEBUG4 = SeverityNumber.V(8) -SEVERITY_NUMBER_INFO = SeverityNumber.V(9) -SEVERITY_NUMBER_INFO2 = SeverityNumber.V(10) -SEVERITY_NUMBER_INFO3 = SeverityNumber.V(11) -SEVERITY_NUMBER_INFO4 = SeverityNumber.V(12) -SEVERITY_NUMBER_WARN = SeverityNumber.V(13) -SEVERITY_NUMBER_WARN2 = SeverityNumber.V(14) -SEVERITY_NUMBER_WARN3 = SeverityNumber.V(15) -SEVERITY_NUMBER_WARN4 = SeverityNumber.V(16) -SEVERITY_NUMBER_ERROR = SeverityNumber.V(17) -SEVERITY_NUMBER_ERROR2 = SeverityNumber.V(18) -SEVERITY_NUMBER_ERROR3 = SeverityNumber.V(19) -SEVERITY_NUMBER_ERROR4 = SeverityNumber.V(20) -SEVERITY_NUMBER_FATAL = SeverityNumber.V(21) -SEVERITY_NUMBER_FATAL2 = SeverityNumber.V(22) -SEVERITY_NUMBER_FATAL3 = SeverityNumber.V(23) -SEVERITY_NUMBER_FATAL4 = SeverityNumber.V(24) +SEVERITY_NUMBER_TRACE: SeverityNumber.ValueType # 1 +SEVERITY_NUMBER_TRACE2: SeverityNumber.ValueType # 2 +SEVERITY_NUMBER_TRACE3: SeverityNumber.ValueType # 3 +SEVERITY_NUMBER_TRACE4: SeverityNumber.ValueType # 4 +SEVERITY_NUMBER_DEBUG: SeverityNumber.ValueType # 5 +SEVERITY_NUMBER_DEBUG2: SeverityNumber.ValueType # 6 +SEVERITY_NUMBER_DEBUG3: SeverityNumber.ValueType # 7 +SEVERITY_NUMBER_DEBUG4: SeverityNumber.ValueType # 8 +SEVERITY_NUMBER_INFO: SeverityNumber.ValueType # 9 +SEVERITY_NUMBER_INFO2: SeverityNumber.ValueType # 10 +SEVERITY_NUMBER_INFO3: SeverityNumber.ValueType # 11 
+SEVERITY_NUMBER_INFO4: SeverityNumber.ValueType # 12 +SEVERITY_NUMBER_WARN: SeverityNumber.ValueType # 13 +SEVERITY_NUMBER_WARN2: SeverityNumber.ValueType # 14 +SEVERITY_NUMBER_WARN3: SeverityNumber.ValueType # 15 +SEVERITY_NUMBER_WARN4: SeverityNumber.ValueType # 16 +SEVERITY_NUMBER_ERROR: SeverityNumber.ValueType # 17 +SEVERITY_NUMBER_ERROR2: SeverityNumber.ValueType # 18 +SEVERITY_NUMBER_ERROR3: SeverityNumber.ValueType # 19 +SEVERITY_NUMBER_ERROR4: SeverityNumber.ValueType # 20 +SEVERITY_NUMBER_FATAL: SeverityNumber.ValueType # 21 +SEVERITY_NUMBER_FATAL2: SeverityNumber.ValueType # 22 +SEVERITY_NUMBER_FATAL3: SeverityNumber.ValueType # 23 +SEVERITY_NUMBER_FATAL4: SeverityNumber.ValueType # 24 global___SeverityNumber = SeverityNumber +class _LogRecordFlags: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _LogRecordFlagsEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _LogRecordFlags.ValueType + ], + builtins.type, +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LOG_RECORD_FLAGS_DO_NOT_USE: _LogRecordFlags.ValueType # 0 + """The zero value for the enum. Should not be used for comparisons. + Instead use bitwise "and" with the appropriate mask as shown above. + """ + LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: _LogRecordFlags.ValueType # 255 + """Bits 0-7 are used for trace flags.""" -class LogRecordFlags(_LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper): +class LogRecordFlags( + _LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper +): """LogRecordFlags represents constants used to interpret the LogRecord.flags field, which is protobuf 'fixed32' type and is to be used as bit-fields. 
Each non-zero value defined in this enum is @@ -88,31 +134,16 @@ class LogRecordFlags(_LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper): (logRecord.flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK) """ - pass -class _LogRecordFlags: - V = typing.NewType('V', builtins.int) -class _LogRecordFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogRecordFlags.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - LOG_RECORD_FLAGS_DO_NOT_USE = LogRecordFlags.V(0) - """The zero value for the enum. Should not be used for comparisons. - Instead use bitwise "and" with the appropriate mask as shown above. - """ - - LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = LogRecordFlags.V(255) - """Bits 0-7 are used for trace flags.""" - -LOG_RECORD_FLAGS_DO_NOT_USE = LogRecordFlags.V(0) +LOG_RECORD_FLAGS_DO_NOT_USE: LogRecordFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. Instead use bitwise "and" with the appropriate mask as shown above. """ - -LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = LogRecordFlags.V(255) +LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: LogRecordFlags.ValueType # 255 """Bits 0-7 are used for trace flags.""" - global___LogRecordFlags = LogRecordFlags - +@typing_extensions.final class LogsData(google.protobuf.message.Message): """LogsData represents the logs data that can be stored in a persistent storage, OR can be embedded by other protocols that transfer OTLP logs data but do not @@ -125,97 +156,162 @@ class LogsData(google.protobuf.message.Message): When new fields are added into this message, the OTLP request MUST be updated as well. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_LOGS_FIELD_NUMBER: builtins.int @property - def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceLogs]: + def resource_logs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ResourceLogs + ]: """An array of ResourceLogs. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - pass - def __init__(self, + + def __init__( + self, *, - resource_logs : typing.Optional[typing.Iterable[global___ResourceLogs]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["resource_logs",b"resource_logs"]) -> None: ... + resource_logs: ( + collections.abc.Iterable[global___ResourceLogs] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_logs", b"resource_logs" + ], + ) -> None: ... + global___LogsData = LogsData +@typing_extensions.final class ResourceLogs(google.protobuf.message.Message): """A collection of ScopeLogs from a Resource.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_FIELD_NUMBER: builtins.int SCOPE_LOGS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource( + self, + ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the logs in this message. If this field is not set then resource info is unknown. 
""" - pass + @property - def scope_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeLogs]: + def scope_logs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ScopeLogs + ]: """A list of ScopeLogs that originate from a resource.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_logs" field which have their own schema_url field. """ - - def __init__(self, + def __init__( + self, *, - resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., - scope_logs : typing.Optional[typing.Iterable[global___ScopeLogs]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_logs",b"scope_logs"]) -> None: ... + resource: ( + opentelemetry.proto.resource.v1.resource_pb2.Resource | None + ) = ..., + scope_logs: collections.abc.Iterable[global___ScopeLogs] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["resource", b"resource"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource", + b"resource", + "schema_url", + b"schema_url", + "scope_logs", + b"scope_logs", + ], + ) -> None: ... 
+ global___ResourceLogs = ResourceLogs +@typing_extensions.final class ScopeLogs(google.protobuf.message.Message): """A collection of Logs produced by a Scope.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + SCOPE_FIELD_NUMBER: builtins.int LOG_RECORDS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope( + self, + ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the logs in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). """ - pass + @property - def log_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogRecord]: + def log_records( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___LogRecord + ]: """A list of log records.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the log data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all logs in the "logs" field. """ - - def __init__(self, + def __init__( + self, *, - scope : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationScope] = ..., - log_records : typing.Optional[typing.Iterable[global___LogRecord]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["scope",b"scope"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["log_records",b"log_records","schema_url",b"schema_url","scope",b"scope"]) -> None: ... 
+ scope: ( + opentelemetry.proto.common.v1.common_pb2.InstrumentationScope + | None + ) = ..., + log_records: collections.abc.Iterable[global___LogRecord] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["scope", b"scope"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "log_records", + b"log_records", + "schema_url", + b"schema_url", + "scope", + b"scope", + ], + ) -> None: ... + global___ScopeLogs = ScopeLogs +@typing_extensions.final class LogRecord(google.protobuf.message.Message): """A log record according to OpenTelemetry Log Data Model: https://github.com/open-telemetry/oteps/blob/main/text/logs/0097-log-data-model.md """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + TIME_UNIX_NANO_FIELD_NUMBER: builtins.int OBSERVED_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int SEVERITY_NUMBER_FIELD_NUMBER: builtins.int @@ -226,13 +322,12 @@ class LogRecord(google.protobuf.message.Message): FLAGS_FIELD_NUMBER: builtins.int TRACE_ID_FIELD_NUMBER: builtins.int SPAN_ID_FIELD_NUMBER: builtins.int - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """time_unix_nano is the time when the event occurred. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. Value of 0 indicates unknown or missing timestamp. """ - - observed_time_unix_nano: builtins.int = ... + observed_time_unix_nano: builtins.int """Time when the event was observed by the collection system. For events that originate in OpenTelemetry (e.g. using OpenTelemetry Logging SDK) this timestamp is typically set at the generation time and is equal to Timestamp. @@ -249,41 +344,40 @@ class LogRecord(google.protobuf.message.Message): Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. Value of 0 indicates unknown or missing timestamp. 
""" - - severity_number: global___SeverityNumber.V = ... + severity_number: global___SeverityNumber.ValueType """Numerical value of the severity, normalized to values described in Log Data Model. [Optional]. """ - - severity_text: typing.Text = ... + severity_text: builtins.str """The severity text (also known as log level). The original string representation as it is known at the source. [Optional]. """ - @property def body(self) -> opentelemetry.proto.common.v1.common_pb2.AnyValue: """A value containing the body of the log record. Can be for example a human-readable string message (including multi-line) describing the event in a free form or it can be a structured data composed of arrays and maps of other values. [Optional]. """ - pass + @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """Additional attributes that describe the specific event occurrence. [Optional]. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... - flags: builtins.int = ... + dropped_attributes_count: builtins.int + flags: builtins.int """Flags, a bit field. 8 least significant bits are the trace flags as defined in W3C Trace Context specification. 24 most significant bits are reserved and must be set to 0. Readers must not assume that 24 most significant bits will be zero and must correctly mask the bits when reading 8-bit trace flag (use flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK). [Optional]. """ - - trace_id: builtins.bytes = ... + trace_id: builtins.bytes """A unique identifier for a trace. All logs from the same trace share the same `trace_id`. The ID is a 16-byte array. 
An ID with all zeroes OR of length other than 16 bytes is considered invalid (empty string in OTLP/JSON @@ -296,8 +390,7 @@ class LogRecord(google.protobuf.message.Message): - the field is not present, - the field contains an invalid value. """ - - span_id: builtins.bytes = ... + span_id: builtins.bytes """A unique identifier for a span within a trace, assigned when the span is created. The ID is an 8-byte array. An ID with all zeroes OR of length other than 8 bytes is considered invalid (empty string in OTLP/JSON @@ -311,20 +404,52 @@ class LogRecord(google.protobuf.message.Message): - the field is not present, - the field contains an invalid value. """ - - def __init__(self, + def __init__( + self, *, - time_unix_nano : builtins.int = ..., - observed_time_unix_nano : builtins.int = ..., - severity_number : global___SeverityNumber.V = ..., - severity_text : typing.Text = ..., - body : typing.Optional[opentelemetry.proto.common.v1.common_pb2.AnyValue] = ..., - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - flags : builtins.int = ..., - trace_id : builtins.bytes = ..., - span_id : builtins.bytes = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["body",b"body"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","body",b"body","dropped_attributes_count",b"dropped_attributes_count","flags",b"flags","observed_time_unix_nano",b"observed_time_unix_nano","severity_number",b"severity_number","severity_text",b"severity_text","span_id",b"span_id","time_unix_nano",b"time_unix_nano","trace_id",b"trace_id"]) -> None: ... 
+ time_unix_nano: builtins.int = ..., + observed_time_unix_nano: builtins.int = ..., + severity_number: global___SeverityNumber.ValueType = ..., + severity_text: builtins.str = ..., + body: opentelemetry.proto.common.v1.common_pb2.AnyValue | None = ..., + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + dropped_attributes_count: builtins.int = ..., + flags: builtins.int = ..., + trace_id: builtins.bytes = ..., + span_id: builtins.bytes = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["body", b"body"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "body", + b"body", + "dropped_attributes_count", + b"dropped_attributes_count", + "flags", + b"flags", + "observed_time_unix_nano", + b"observed_time_unix_nano", + "severity_number", + b"severity_number", + "severity_text", + b"severity_text", + "span_id", + b"span_id", + "time_unix_nano", + b"time_unix_nano", + "trace_id", + b"trace_id", + ], + ) -> None: ... + global___LogRecord = LogRecord diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py index e0b8f3bbc52..98ed5c33f59 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py @@ -1,203 +1,74 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: opentelemetry/proto/metrics/v1/metrics.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 -from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xcd\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 
\x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x12\x39\n\x08metadata\x18\x0c \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValueB\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t\"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08\"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint\"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 
\x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02\"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02\"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max\"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 
\x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02\"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3') - -_AGGREGATIONTEMPORALITY = DESCRIPTOR.enum_types_by_name['AggregationTemporality'] -AggregationTemporality = enum_type_wrapper.EnumTypeWrapper(_AGGREGATIONTEMPORALITY) -_DATAPOINTFLAGS = DESCRIPTOR.enum_types_by_name['DataPointFlags'] -DataPointFlags = enum_type_wrapper.EnumTypeWrapper(_DATAPOINTFLAGS) -AGGREGATION_TEMPORALITY_UNSPECIFIED = 0 -AGGREGATION_TEMPORALITY_DELTA = 1 -AGGREGATION_TEMPORALITY_CUMULATIVE = 2 -DATA_POINT_FLAGS_DO_NOT_USE = 0 -DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1 - - -_METRICSDATA = DESCRIPTOR.message_types_by_name['MetricsData'] -_RESOURCEMETRICS = DESCRIPTOR.message_types_by_name['ResourceMetrics'] -_SCOPEMETRICS = DESCRIPTOR.message_types_by_name['ScopeMetrics'] -_METRIC = DESCRIPTOR.message_types_by_name['Metric'] 
-_GAUGE = DESCRIPTOR.message_types_by_name['Gauge'] -_SUM = DESCRIPTOR.message_types_by_name['Sum'] -_HISTOGRAM = DESCRIPTOR.message_types_by_name['Histogram'] -_EXPONENTIALHISTOGRAM = DESCRIPTOR.message_types_by_name['ExponentialHistogram'] -_SUMMARY = DESCRIPTOR.message_types_by_name['Summary'] -_NUMBERDATAPOINT = DESCRIPTOR.message_types_by_name['NumberDataPoint'] -_HISTOGRAMDATAPOINT = DESCRIPTOR.message_types_by_name['HistogramDataPoint'] -_EXPONENTIALHISTOGRAMDATAPOINT = DESCRIPTOR.message_types_by_name['ExponentialHistogramDataPoint'] -_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS = _EXPONENTIALHISTOGRAMDATAPOINT.nested_types_by_name['Buckets'] -_SUMMARYDATAPOINT = DESCRIPTOR.message_types_by_name['SummaryDataPoint'] -_SUMMARYDATAPOINT_VALUEATQUANTILE = _SUMMARYDATAPOINT.nested_types_by_name['ValueAtQuantile'] -_EXEMPLAR = DESCRIPTOR.message_types_by_name['Exemplar'] -MetricsData = _reflection.GeneratedProtocolMessageType('MetricsData', (_message.Message,), { - 'DESCRIPTOR' : _METRICSDATA, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.MetricsData) - }) -_sym_db.RegisterMessage(MetricsData) - -ResourceMetrics = _reflection.GeneratedProtocolMessageType('ResourceMetrics', (_message.Message,), { - 'DESCRIPTOR' : _RESOURCEMETRICS, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.ResourceMetrics) - }) -_sym_db.RegisterMessage(ResourceMetrics) - -ScopeMetrics = _reflection.GeneratedProtocolMessageType('ScopeMetrics', (_message.Message,), { - 'DESCRIPTOR' : _SCOPEMETRICS, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.ScopeMetrics) - }) -_sym_db.RegisterMessage(ScopeMetrics) - -Metric = _reflection.GeneratedProtocolMessageType('Metric', (_message.Message,), { - 'DESCRIPTOR' : _METRIC, - '__module__' : 
'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Metric) - }) -_sym_db.RegisterMessage(Metric) - -Gauge = _reflection.GeneratedProtocolMessageType('Gauge', (_message.Message,), { - 'DESCRIPTOR' : _GAUGE, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Gauge) - }) -_sym_db.RegisterMessage(Gauge) - -Sum = _reflection.GeneratedProtocolMessageType('Sum', (_message.Message,), { - 'DESCRIPTOR' : _SUM, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Sum) - }) -_sym_db.RegisterMessage(Sum) - -Histogram = _reflection.GeneratedProtocolMessageType('Histogram', (_message.Message,), { - 'DESCRIPTOR' : _HISTOGRAM, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Histogram) - }) -_sym_db.RegisterMessage(Histogram) - -ExponentialHistogram = _reflection.GeneratedProtocolMessageType('ExponentialHistogram', (_message.Message,), { - 'DESCRIPTOR' : _EXPONENTIALHISTOGRAM, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.ExponentialHistogram) - }) -_sym_db.RegisterMessage(ExponentialHistogram) - -Summary = _reflection.GeneratedProtocolMessageType('Summary', (_message.Message,), { - 'DESCRIPTOR' : _SUMMARY, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Summary) - }) -_sym_db.RegisterMessage(Summary) - -NumberDataPoint = _reflection.GeneratedProtocolMessageType('NumberDataPoint', (_message.Message,), { - 'DESCRIPTOR' : _NUMBERDATAPOINT, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.NumberDataPoint) - }) 
-_sym_db.RegisterMessage(NumberDataPoint) - -HistogramDataPoint = _reflection.GeneratedProtocolMessageType('HistogramDataPoint', (_message.Message,), { - 'DESCRIPTOR' : _HISTOGRAMDATAPOINT, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.HistogramDataPoint) - }) -_sym_db.RegisterMessage(HistogramDataPoint) - -ExponentialHistogramDataPoint = _reflection.GeneratedProtocolMessageType('ExponentialHistogramDataPoint', (_message.Message,), { - - 'Buckets' : _reflection.GeneratedProtocolMessageType('Buckets', (_message.Message,), { - 'DESCRIPTOR' : _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets) - }) - , - 'DESCRIPTOR' : _EXPONENTIALHISTOGRAMDATAPOINT, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint) - }) -_sym_db.RegisterMessage(ExponentialHistogramDataPoint) -_sym_db.RegisterMessage(ExponentialHistogramDataPoint.Buckets) - -SummaryDataPoint = _reflection.GeneratedProtocolMessageType('SummaryDataPoint', (_message.Message,), { - - 'ValueAtQuantile' : _reflection.GeneratedProtocolMessageType('ValueAtQuantile', (_message.Message,), { - 'DESCRIPTOR' : _SUMMARYDATAPOINT_VALUEATQUANTILE, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile) - }) - , - 'DESCRIPTOR' : _SUMMARYDATAPOINT, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.SummaryDataPoint) - }) -_sym_db.RegisterMessage(SummaryDataPoint) -_sym_db.RegisterMessage(SummaryDataPoint.ValueAtQuantile) - -Exemplar = _reflection.GeneratedProtocolMessageType('Exemplar', 
(_message.Message,), { - 'DESCRIPTOR' : _EXEMPLAR, - '__module__' : 'opentelemetry.proto.metrics.v1.metrics_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.v1.Exemplar) - }) -_sym_db.RegisterMessage(Exemplar) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1' - _AGGREGATIONTEMPORALITY._serialized_start=3546 - _AGGREGATIONTEMPORALITY._serialized_end=3686 - _DATAPOINTFLAGS._serialized_start=3688 - _DATAPOINTFLAGS._serialized_end=3782 - _METRICSDATA._serialized_start=172 - _METRICSDATA._serialized_end=260 - _RESOURCEMETRICS._serialized_start=263 - _RESOURCEMETRICS._serialized_end=438 - _SCOPEMETRICS._serialized_start=441 - _SCOPEMETRICS._serialized_end=600 - _METRIC._serialized_start=603 - _METRIC._serialized_end=1064 - _GAUGE._serialized_start=1066 - _GAUGE._serialized_end=1143 - _SUM._serialized_start=1146 - _SUM._serialized_end=1332 - _HISTOGRAM._serialized_start=1335 - _HISTOGRAM._serialized_end=1508 - _EXPONENTIALHISTOGRAM._serialized_start=1511 - _EXPONENTIALHISTOGRAM._serialized_end=1706 - _SUMMARY._serialized_start=1708 - _SUMMARY._serialized_end=1788 - _NUMBERDATAPOINT._serialized_start=1791 - _NUMBERDATAPOINT._serialized_end=2053 - _HISTOGRAMDATAPOINT._serialized_start=2056 - _HISTOGRAMDATAPOINT._serialized_end=2414 - _EXPONENTIALHISTOGRAMDATAPOINT._serialized_start=2417 - _EXPONENTIALHISTOGRAMDATAPOINT._serialized_end=3019 - _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_start=2947 - _EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS._serialized_end=2995 - _SUMMARYDATAPOINT._serialized_start=3022 - _SUMMARYDATAPOINT._serialized_end=3347 - _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_start=3291 - _SUMMARYDATAPOINT_VALUEATQUANTILE._serialized_end=3341 - _EXEMPLAR._serialized_start=3350 - _EXEMPLAR._serialized_end=3543 +from 
opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xcd\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x12\x39\n\x08metadata\x18\x0c \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValueB\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 
\x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b 
\x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 
\x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.metrics.v1.metrics_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1" + ) + _globals["_AGGREGATIONTEMPORALITY"]._serialized_start = 3546 + _globals["_AGGREGATIONTEMPORALITY"]._serialized_end = 3686 + _globals["_DATAPOINTFLAGS"]._serialized_start = 3688 + _globals["_DATAPOINTFLAGS"]._serialized_end = 3782 + _globals["_METRICSDATA"]._serialized_start = 172 + _globals["_METRICSDATA"]._serialized_end = 260 + _globals["_RESOURCEMETRICS"]._serialized_start = 263 + _globals["_RESOURCEMETRICS"]._serialized_end = 438 + _globals["_SCOPEMETRICS"]._serialized_start = 441 + _globals["_SCOPEMETRICS"]._serialized_end = 600 + _globals["_METRIC"]._serialized_start = 603 + _globals["_METRIC"]._serialized_end = 1064 + _globals["_GAUGE"]._serialized_start = 1066 + _globals["_GAUGE"]._serialized_end = 1143 + _globals["_SUM"]._serialized_start = 1146 + _globals["_SUM"]._serialized_end = 1332 + _globals["_HISTOGRAM"]._serialized_start = 1335 + 
_globals["_HISTOGRAM"]._serialized_end = 1508 + _globals["_EXPONENTIALHISTOGRAM"]._serialized_start = 1511 + _globals["_EXPONENTIALHISTOGRAM"]._serialized_end = 1706 + _globals["_SUMMARY"]._serialized_start = 1708 + _globals["_SUMMARY"]._serialized_end = 1788 + _globals["_NUMBERDATAPOINT"]._serialized_start = 1791 + _globals["_NUMBERDATAPOINT"]._serialized_end = 2053 + _globals["_HISTOGRAMDATAPOINT"]._serialized_start = 2056 + _globals["_HISTOGRAMDATAPOINT"]._serialized_end = 2414 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_start = 2417 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_end = 3019 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_start = 2947 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_end = 2995 + _globals["_SUMMARYDATAPOINT"]._serialized_start = 3022 + _globals["_SUMMARYDATAPOINT"]._serialized_end = 3347 + _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_start = 3291 + _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_end = 3341 + _globals["_EXEMPLAR"]._serialized_start = 3350 + _globals["_EXEMPLAR"]._serialized_end = 3543 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi index 8fd86329610..7bb4d95b102 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi @@ -1,33 +1,53 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.internal.enum_type_wrapper import google.protobuf.message import opentelemetry.proto.common.v1.common_pb2 import opentelemetry.proto.resource.v1.resource_pb2 +import sys import typing -import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -class AggregationTemporality(_AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper): - """AggregationTemporality defines how a metric aggregator reports aggregated - values. It describes how those values relate to the time interval over - which they are aggregated. - """ - pass class _AggregationTemporality: - V = typing.NewType('V', builtins.int) -class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AggregationTemporality.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... 
- AGGREGATION_TEMPORALITY_UNSPECIFIED = AggregationTemporality.V(0) + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _AggregationTemporalityEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _AggregationTemporality.ValueType + ], + builtins.type, +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + AGGREGATION_TEMPORALITY_UNSPECIFIED: _AggregationTemporality.ValueType # 0 """UNSPECIFIED is the default AggregationTemporality, it MUST not be used.""" - - AGGREGATION_TEMPORALITY_DELTA = AggregationTemporality.V(1) + AGGREGATION_TEMPORALITY_DELTA: _AggregationTemporality.ValueType # 1 """DELTA is an AggregationTemporality for a metric aggregator which reports changes since last report time. Successive metrics contain aggregation of values from continuous and non-overlapping intervals. @@ -53,8 +73,7 @@ class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_ number of requests received over the interval of time t_0+1 to t_0+2 with a value of 2. """ - - AGGREGATION_TEMPORALITY_CUMULATIVE = AggregationTemporality.V(2) + AGGREGATION_TEMPORALITY_CUMULATIVE: _AggregationTemporality.ValueType # 2 """CUMULATIVE is an AggregationTemporality for a metric aggregator which reports changes since a fixed start time. This means that current values of a CUMULATIVE metric depend on all previous measurements since the @@ -92,11 +111,17 @@ class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_ value was reset (e.g. Prometheus). """ +class AggregationTemporality( + _AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper +): + """AggregationTemporality defines how a metric aggregator reports aggregated + values. It describes how those values relate to the time interval over + which they are aggregated. 
+ """ -AGGREGATION_TEMPORALITY_UNSPECIFIED = AggregationTemporality.V(0) +AGGREGATION_TEMPORALITY_UNSPECIFIED: AggregationTemporality.ValueType # 0 """UNSPECIFIED is the default AggregationTemporality, it MUST not be used.""" - -AGGREGATION_TEMPORALITY_DELTA = AggregationTemporality.V(1) +AGGREGATION_TEMPORALITY_DELTA: AggregationTemporality.ValueType # 1 """DELTA is an AggregationTemporality for a metric aggregator which reports changes since last report time. Successive metrics contain aggregation of values from continuous and non-overlapping intervals. @@ -122,8 +147,7 @@ DELTA metric: number of requests received over the interval of time t_0+1 to t_0+2 with a value of 2. """ - -AGGREGATION_TEMPORALITY_CUMULATIVE = AggregationTemporality.V(2) +AGGREGATION_TEMPORALITY_CUMULATIVE: AggregationTemporality.ValueType # 2 """CUMULATIVE is an AggregationTemporality for a metric aggregator which reports changes since a fixed start time. This means that current values of a CUMULATIVE metric depend on all previous measurements since the @@ -160,49 +184,52 @@ CUMULATIVE is valid, it is not recommended. This may cause problems for systems that do not use start_time to determine when the aggregation value was reset (e.g. Prometheus). """ - global___AggregationTemporality = AggregationTemporality - -class DataPointFlags(_DataPointFlags, metaclass=_DataPointFlagsEnumTypeWrapper): - """DataPointFlags is defined as a protobuf 'uint32' type and is to be used as a - bit-field representing 32 distinct boolean flags. Each flag defined in this - enum is a bit-mask. 
To test the presence of a single flag in the flags of - a data point, for example, use an expression like: - - (point.flags & DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK) == DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK - """ - pass class _DataPointFlags: - V = typing.NewType('V', builtins.int) -class _DataPointFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataPointFlags.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - DATA_POINT_FLAGS_DO_NOT_USE = DataPointFlags.V(0) + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _DataPointFlagsEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _DataPointFlags.ValueType + ], + builtins.type, +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + DATA_POINT_FLAGS_DO_NOT_USE: _DataPointFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. Instead use bitwise "and" with the appropriate mask as shown above. """ - - DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = DataPointFlags.V(1) + DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK: _DataPointFlags.ValueType # 1 """This DataPoint is valid but has no recorded value. This value SHOULD be used to reflect explicitly missing data in a series, as for an equivalent to the Prometheus "staleness marker". """ +class DataPointFlags( + _DataPointFlags, metaclass=_DataPointFlagsEnumTypeWrapper +): + """DataPointFlags is defined as a protobuf 'uint32' type and is to be used as a + bit-field representing 32 distinct boolean flags. Each flag defined in this + enum is a bit-mask. 
To test the presence of a single flag in the flags of + a data point, for example, use an expression like: + + (point.flags & DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK) == DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK + """ -DATA_POINT_FLAGS_DO_NOT_USE = DataPointFlags.V(0) +DATA_POINT_FLAGS_DO_NOT_USE: DataPointFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. Instead use bitwise "and" with the appropriate mask as shown above. """ - -DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = DataPointFlags.V(1) +DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK: DataPointFlags.ValueType # 1 """This DataPoint is valid but has no recorded value. This value SHOULD be used to reflect explicitly missing data in a series, as for an equivalent to the Prometheus "staleness marker". """ - global___DataPointFlags = DataPointFlags - +@typing_extensions.final class MetricsData(google.protobuf.message.Message): """MetricsData represents the metrics data that can be stored in a persistent storage, OR can be embedded by other protocols that transfer OTLP metrics @@ -215,92 +242,157 @@ class MetricsData(google.protobuf.message.Message): When new fields are added into this message, the OTLP request MUST be updated as well. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_METRICS_FIELD_NUMBER: builtins.int @property - def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceMetrics]: + def resource_metrics( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ResourceMetrics + ]: """An array of ResourceMetrics. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. 
""" - pass - def __init__(self, + + def __init__( + self, *, - resource_metrics : typing.Optional[typing.Iterable[global___ResourceMetrics]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["resource_metrics",b"resource_metrics"]) -> None: ... + resource_metrics: ( + collections.abc.Iterable[global___ResourceMetrics] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_metrics", b"resource_metrics" + ], + ) -> None: ... + global___MetricsData = MetricsData +@typing_extensions.final class ResourceMetrics(google.protobuf.message.Message): """A collection of ScopeMetrics from a Resource.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_FIELD_NUMBER: builtins.int SCOPE_METRICS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource( + self, + ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the metrics in this message. If this field is not set then no resource info is known. """ - pass + @property - def scope_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeMetrics]: + def scope_metrics( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ScopeMetrics + ]: """A list of metrics that originate from a resource.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_metrics" field which have their own schema_url field. 
""" - - def __init__(self, + def __init__( + self, *, - resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., - scope_metrics : typing.Optional[typing.Iterable[global___ScopeMetrics]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_metrics",b"scope_metrics"]) -> None: ... + resource: ( + opentelemetry.proto.resource.v1.resource_pb2.Resource | None + ) = ..., + scope_metrics: ( + collections.abc.Iterable[global___ScopeMetrics] | None + ) = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["resource", b"resource"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource", + b"resource", + "schema_url", + b"schema_url", + "scope_metrics", + b"scope_metrics", + ], + ) -> None: ... + global___ResourceMetrics = ResourceMetrics +@typing_extensions.final class ScopeMetrics(google.protobuf.message.Message): """A collection of Metrics produced by an Scope.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + SCOPE_FIELD_NUMBER: builtins.int METRICS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope( + self, + ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the metrics in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). 
""" - pass + @property - def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Metric]: + def metrics( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Metric + ]: """A list of metrics that originate from an instrumentation library.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the metric data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all metrics in the "metrics" field. """ - - def __init__(self, + def __init__( + self, *, - scope : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationScope] = ..., - metrics : typing.Optional[typing.Iterable[global___Metric]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["scope",b"scope"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["metrics",b"metrics","schema_url",b"schema_url","scope",b"scope"]) -> None: ... + scope: ( + opentelemetry.proto.common.v1.common_pb2.InstrumentationScope + | None + ) = ..., + metrics: collections.abc.Iterable[global___Metric] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["scope", b"scope"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "metrics", + b"metrics", + "schema_url", + b"schema_url", + "scope", + b"scope", + ], + ) -> None: ... + global___ScopeMetrics = ScopeMetrics +@typing_extensions.final class Metric(google.protobuf.message.Message): """Defines a Metric which has one or more timeseries. The following is a brief summary of the Metric data model. 
For more details, see: @@ -388,7 +480,9 @@ class Metric(google.protobuf.message.Message): when the start time is truly unknown, setting StartTimeUnixNano is strongly encouraged. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + NAME_FIELD_NUMBER: builtins.int DESCRIPTION_FIELD_NUMBER: builtins.int UNIT_FIELD_NUMBER: builtins.int @@ -398,17 +492,14 @@ class Metric(google.protobuf.message.Message): EXPONENTIAL_HISTOGRAM_FIELD_NUMBER: builtins.int SUMMARY_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: builtins.str """name of the metric.""" - - description: typing.Text = ... + description: builtins.str """description of the metric, which can be used in documentation.""" - - unit: typing.Text = ... + unit: builtins.str """unit in which the metric value is reported. Follows the format described by http://unitsofmeasure.org/ucum.html. """ - @property def gauge(self) -> global___Gauge: ... @property @@ -420,7 +511,11 @@ class Metric(google.protobuf.message.Message): @property def summary(self) -> global___Summary: ... @property - def metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def metadata( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """Additional metadata attributes that describe the metric. [Optional]. Attributes are non-identifying. Consumers SHOULD NOT need to be aware of these attributes. @@ -429,24 +524,79 @@ class Metric(google.protobuf.message.Message): Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). 
""" - pass - def __init__(self, + + def __init__( + self, *, - name : typing.Text = ..., - description : typing.Text = ..., - unit : typing.Text = ..., - gauge : typing.Optional[global___Gauge] = ..., - sum : typing.Optional[global___Sum] = ..., - histogram : typing.Optional[global___Histogram] = ..., - exponential_histogram : typing.Optional[global___ExponentialHistogram] = ..., - summary : typing.Optional[global___Summary] = ..., - metadata : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["data",b"data","exponential_histogram",b"exponential_histogram","gauge",b"gauge","histogram",b"histogram","sum",b"sum","summary",b"summary"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["data",b"data","description",b"description","exponential_histogram",b"exponential_histogram","gauge",b"gauge","histogram",b"histogram","metadata",b"metadata","name",b"name","sum",b"sum","summary",b"summary","unit",b"unit"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["data",b"data"]) -> typing.Optional[typing_extensions.Literal["gauge","sum","histogram","exponential_histogram","summary"]]: ... + name: builtins.str = ..., + description: builtins.str = ..., + unit: builtins.str = ..., + gauge: global___Gauge | None = ..., + sum: global___Sum | None = ..., + histogram: global___Histogram | None = ..., + exponential_histogram: global___ExponentialHistogram | None = ..., + summary: global___Summary | None = ..., + metadata: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "data", + b"data", + "exponential_histogram", + b"exponential_histogram", + "gauge", + b"gauge", + "histogram", + b"histogram", + "sum", + b"sum", + "summary", + b"summary", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "data", + b"data", + "description", + b"description", + "exponential_histogram", + b"exponential_histogram", + "gauge", + b"gauge", + "histogram", + b"histogram", + "metadata", + b"metadata", + "name", + b"name", + "sum", + b"sum", + "summary", + b"summary", + "unit", + b"unit", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["data", b"data"] + ) -> ( + typing_extensions.Literal[ + "gauge", "sum", "histogram", "exponential_histogram", "summary" + ] + | None + ): ... + global___Metric = Metric +@typing_extensions.final class Gauge(google.protobuf.message.Message): """Gauge represents the type of a scalar metric that always exports the "current value" for every data point. It should be used for an "unknown" @@ -458,88 +608,158 @@ class Gauge(google.protobuf.message.Message): AggregationTemporality is not included. Consequently, this also means "StartTimeUnixNano" is ignored for all data points. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATA_POINTS_FIELD_NUMBER: builtins.int @property - def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ... - def __init__(self, + def data_points( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___NumberDataPoint + ]: ... + def __init__( + self, *, - data_points : typing.Optional[typing.Iterable[global___NumberDataPoint]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["data_points",b"data_points"]) -> None: ... + data_points: ( + collections.abc.Iterable[global___NumberDataPoint] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["data_points", b"data_points"], + ) -> None: ... 
+ global___Gauge = Gauge +@typing_extensions.final class Sum(google.protobuf.message.Message): """Sum represents the type of a scalar metric that is calculated as a sum of all reported measurements over a time interval. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATA_POINTS_FIELD_NUMBER: builtins.int AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int IS_MONOTONIC_FIELD_NUMBER: builtins.int @property - def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ... - aggregation_temporality: global___AggregationTemporality.V = ... + def data_points( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___NumberDataPoint + ]: ... + aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. """ - - is_monotonic: builtins.bool = ... + is_monotonic: builtins.bool """If "true" means that the sum is monotonic.""" - - def __init__(self, + def __init__( + self, *, - data_points : typing.Optional[typing.Iterable[global___NumberDataPoint]] = ..., - aggregation_temporality : global___AggregationTemporality.V = ..., - is_monotonic : builtins.bool = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points","is_monotonic",b"is_monotonic"]) -> None: ... + data_points: ( + collections.abc.Iterable[global___NumberDataPoint] | None + ) = ..., + aggregation_temporality: global___AggregationTemporality.ValueType = ..., + is_monotonic: builtins.bool = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "aggregation_temporality", + b"aggregation_temporality", + "data_points", + b"data_points", + "is_monotonic", + b"is_monotonic", + ], + ) -> None: ... + global___Sum = Sum +@typing_extensions.final class Histogram(google.protobuf.message.Message): """Histogram represents the type of a metric that is calculated by aggregating as a Histogram of all reported measurements over a time interval. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATA_POINTS_FIELD_NUMBER: builtins.int AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int @property - def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HistogramDataPoint]: ... - aggregation_temporality: global___AggregationTemporality.V = ... + def data_points( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HistogramDataPoint + ]: ... + aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. """ - - def __init__(self, + def __init__( + self, *, - data_points : typing.Optional[typing.Iterable[global___HistogramDataPoint]] = ..., - aggregation_temporality : global___AggregationTemporality.V = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points"]) -> None: ... + data_points: ( + collections.abc.Iterable[global___HistogramDataPoint] | None + ) = ..., + aggregation_temporality: global___AggregationTemporality.ValueType = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "aggregation_temporality", + b"aggregation_temporality", + "data_points", + b"data_points", + ], + ) -> None: ... 
+ global___Histogram = Histogram +@typing_extensions.final class ExponentialHistogram(google.protobuf.message.Message): """ExponentialHistogram represents the type of a metric that is calculated by aggregating as a ExponentialHistogram of all reported double measurements over a time interval. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATA_POINTS_FIELD_NUMBER: builtins.int AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int @property - def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ExponentialHistogramDataPoint]: ... - aggregation_temporality: global___AggregationTemporality.V = ... + def data_points( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ExponentialHistogramDataPoint + ]: ... + aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. """ - - def __init__(self, + def __init__( + self, *, - data_points : typing.Optional[typing.Iterable[global___ExponentialHistogramDataPoint]] = ..., - aggregation_temporality : global___AggregationTemporality.V = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points"]) -> None: ... + data_points: ( + collections.abc.Iterable[global___ExponentialHistogramDataPoint] + | None + ) = ..., + aggregation_temporality: global___AggregationTemporality.ValueType = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "aggregation_temporality", + b"aggregation_temporality", + "data_points", + b"data_points", + ], + ) -> None: ... 
+ global___ExponentialHistogram = ExponentialHistogram +@typing_extensions.final class Summary(google.protobuf.message.Message): """Summary metric data are used to convey quantile summaries, a Prometheus (see: https://prometheus.io/docs/concepts/metric_types/#summary) @@ -548,22 +768,38 @@ class Summary(google.protobuf.message.Message): While they can be useful in some applications, histogram data points are recommended for new applications. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATA_POINTS_FIELD_NUMBER: builtins.int @property - def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint]: ... - def __init__(self, + def data_points( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___SummaryDataPoint + ]: ... + def __init__( + self, *, - data_points : typing.Optional[typing.Iterable[global___SummaryDataPoint]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["data_points",b"data_points"]) -> None: ... + data_points: ( + collections.abc.Iterable[global___SummaryDataPoint] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["data_points", b"data_points"], + ) -> None: ... + global___Summary = Summary +@typing_extensions.final class NumberDataPoint(google.protobuf.message.Message): """NumberDataPoint is a single data point in a timeseries that describes the time-varying scalar value of a metric. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ + DESCRIPTOR: google.protobuf.descriptor.Descriptor + ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -572,56 +808,94 @@ class NumberDataPoint(google.protobuf.message.Message): EXEMPLARS_FIELD_NUMBER: builtins.int FLAGS_FIELD_NUMBER: builtins.int @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - start_time_unix_nano: builtins.int = ... + start_time_unix_nano: builtins.int """StartTimeUnixNano is optional but strongly encouraged, see the the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """TimeUnixNano is required, see the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - as_double: builtins.float = ... - as_int: builtins.int = ... + as_double: builtins.float + as_int: builtins.int @property - def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: + def exemplars( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Exemplar + ]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ - pass - flags: builtins.int = ... + flags: builtins.int """Flags that apply to this specific data point. 
See DataPointFlags for the available flags and their meaning. """ - - def __init__(self, + def __init__( + self, *, - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - start_time_unix_nano : builtins.int = ..., - time_unix_nano : builtins.int = ..., - as_double : builtins.float = ..., - as_int : builtins.int = ..., - exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ..., - flags : builtins.int = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","attributes",b"attributes","exemplars",b"exemplars","flags",b"flags","start_time_unix_nano",b"start_time_unix_nano","time_unix_nano",b"time_unix_nano","value",b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["as_double","as_int"]]: ... + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + start_time_unix_nano: builtins.int = ..., + time_unix_nano: builtins.int = ..., + as_double: builtins.float = ..., + as_int: builtins.int = ..., + exemplars: collections.abc.Iterable[global___Exemplar] | None = ..., + flags: builtins.int = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "as_double", b"as_double", "as_int", b"as_int", "value", b"value" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "as_double", + b"as_double", + "as_int", + b"as_int", + "attributes", + b"attributes", + "exemplars", + b"exemplars", + "flags", + b"flags", + "start_time_unix_nano", + b"start_time_unix_nano", + "time_unix_nano", + b"time_unix_nano", + "value", + b"value", + ], + ) -> None: ... 
+ def WhichOneof( + self, oneof_group: typing_extensions.Literal["value", b"value"] + ) -> typing_extensions.Literal["as_double", "as_int"] | None: ... + global___NumberDataPoint = NumberDataPoint +@typing_extensions.final class HistogramDataPoint(google.protobuf.message.Message): """HistogramDataPoint is a single data point in a timeseries that describes the time-varying values of a Histogram. A Histogram contains summary statistics @@ -634,7 +908,9 @@ class HistogramDataPoint(google.protobuf.message.Message): "explicit_bounds" and "bucket_counts" must be omitted and only "count" and "sum" are known. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -647,35 +923,35 @@ class HistogramDataPoint(google.protobuf.message.Message): MIN_FIELD_NUMBER: builtins.int MAX_FIELD_NUMBER: builtins.int @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - start_time_unix_nano: builtins.int = ... + start_time_unix_nano: builtins.int """StartTimeUnixNano is optional but strongly encouraged, see the the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """TimeUnixNano is required, see the detailed comments above Metric. 
Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - count: builtins.int = ... + count: builtins.int """count is the number of values in the population. Must be non-negative. This value must be equal to the sum of the "count" fields in buckets if a histogram is provided. """ - - sum: builtins.float = ... + sum: builtins.float """sum of the values in the population. If count is zero then this field must be zero. @@ -685,9 +961,12 @@ class HistogramDataPoint(google.protobuf.message.Message): doing so. This is specifically to enforce compatibility w/ OpenMetrics, see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram """ - @property - def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + def bucket_counts( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + builtins.int + ]: """bucket_counts is an optional field contains the count values of histogram for each bucket. @@ -696,9 +975,13 @@ class HistogramDataPoint(google.protobuf.message.Message): The number of elements in bucket_counts array must be by one greater than the number of elements in explicit_bounds array. """ - pass + @property - def explicit_bounds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: + def explicit_bounds( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + builtins.float + ]: """explicit_bounds specifies buckets with explicitly defined bounds for values. The boundaries for bucket at index i are: @@ -713,70 +996,140 @@ class HistogramDataPoint(google.protobuf.message.Message): bucket where the boundary is at infinity. This format is intentionally compatible with the OpenMetrics histogram definition. 
""" - pass + @property - def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: + def exemplars( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Exemplar + ]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ - pass - flags: builtins.int = ... + flags: builtins.int """Flags that apply to this specific data point. See DataPointFlags for the available flags and their meaning. """ - - min: builtins.float = ... + min: builtins.float """min is the minimum value over (start_time, end_time].""" - - max: builtins.float = ... + max: builtins.float """max is the maximum value over (start_time, end_time].""" - - def __init__(self, + def __init__( + self, *, - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - start_time_unix_nano : builtins.int = ..., - time_unix_nano : builtins.int = ..., - count : builtins.int = ..., - sum : builtins.float = ..., - bucket_counts : typing.Optional[typing.Iterable[builtins.int]] = ..., - explicit_bounds : typing.Optional[typing.Iterable[builtins.float]] = ..., - exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ..., - flags : builtins.int = ..., - min : builtins.float = ..., - max : builtins.float = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","max",b"max","min",b"min","sum",b"sum"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","attributes",b"attributes","bucket_counts",b"bucket_counts","count",b"count","exemplars",b"exemplars","explicit_bounds",b"explicit_bounds","flags",b"flags","max",b"max","min",b"min","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano"]) -> None: ... 
+ attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + start_time_unix_nano: builtins.int = ..., + time_unix_nano: builtins.int = ..., + count: builtins.int = ..., + sum: builtins.float | None = ..., + bucket_counts: collections.abc.Iterable[builtins.int] | None = ..., + explicit_bounds: collections.abc.Iterable[builtins.float] | None = ..., + exemplars: collections.abc.Iterable[global___Exemplar] | None = ..., + flags: builtins.int = ..., + min: builtins.float | None = ..., + max: builtins.float | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "_max", + b"_max", + "_min", + b"_min", + "_sum", + b"_sum", + "max", + b"max", + "min", + b"min", + "sum", + b"sum", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "_max", + b"_max", + "_min", + b"_min", + "_sum", + b"_sum", + "attributes", + b"attributes", + "bucket_counts", + b"bucket_counts", + "count", + b"count", + "exemplars", + b"exemplars", + "explicit_bounds", + b"explicit_bounds", + "flags", + b"flags", + "max", + b"max", + "min", + b"min", + "start_time_unix_nano", + b"start_time_unix_nano", + "sum", + b"sum", + "time_unix_nano", + b"time_unix_nano", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_max",b"_max"]) -> typing.Optional[typing_extensions.Literal["max"]]: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["_max", b"_max"] + ) -> typing_extensions.Literal["max"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_min",b"_min"]) -> typing.Optional[typing_extensions.Literal["min"]]: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["_min", b"_min"] + ) -> typing_extensions.Literal["min"] | None: ... 
@typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_sum",b"_sum"]) -> typing.Optional[typing_extensions.Literal["sum"]]: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["_sum", b"_sum"] + ) -> typing_extensions.Literal["sum"] | None: ... + global___HistogramDataPoint = HistogramDataPoint +@typing_extensions.final class ExponentialHistogramDataPoint(google.protobuf.message.Message): """ExponentialHistogramDataPoint is a single data point in a timeseries that describes the time-varying values of a ExponentialHistogram of double values. A ExponentialHistogram contains summary statistics for a population of values, it may optionally contain the distribution of those values across a set of buckets. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final class Buckets(google.protobuf.message.Message): """Buckets are a set of bucket counts, encoded in a contiguous array of counts. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + OFFSET_FIELD_NUMBER: builtins.int BUCKET_COUNTS_FIELD_NUMBER: builtins.int - offset: builtins.int = ... + offset: builtins.int """Offset is the bucket index of the first entry in the bucket_counts array. Note: This uses a varint encoding as a simple form of compression. """ - @property - def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + def bucket_counts( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + builtins.int + ]: """bucket_counts is an array of count values, where bucket_counts[i] carries the count of the bucket at index (offset+i). 
bucket_counts[i] is the count of values greater than base^(offset+i) and less than or equal to @@ -787,13 +1140,19 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): especially zeros, so uint64 has been selected to ensure varint encoding. """ - pass - def __init__(self, + + def __init__( + self, *, - offset : builtins.int = ..., - bucket_counts : typing.Optional[typing.Iterable[builtins.int]] = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["bucket_counts",b"bucket_counts","offset",b"offset"]) -> None: ... + offset: builtins.int = ..., + bucket_counts: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "bucket_counts", b"bucket_counts", "offset", b"offset" + ], + ) -> None: ... ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -810,35 +1169,35 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): MAX_FIELD_NUMBER: builtins.int ZERO_THRESHOLD_FIELD_NUMBER: builtins.int @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - start_time_unix_nano: builtins.int = ... + start_time_unix_nano: builtins.int """StartTimeUnixNano is optional but strongly encouraged, see the the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - time_unix_nano: builtins.int = ... 
+ time_unix_nano: builtins.int """TimeUnixNano is required, see the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - count: builtins.int = ... + count: builtins.int """count is the number of values in the population. Must be non-negative. This value must be equal to the sum of the "bucket_counts" values in the positive and negative Buckets plus the "zero_count" field. """ - - sum: builtins.float = ... + sum: builtins.float """sum of the values in the population. If count is zero then this field must be zero. @@ -848,8 +1207,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): doing so. This is specifically to enforce compatibility w/ OpenMetrics, see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram """ - - scale: builtins.int = ... + scale: builtins.int """scale describes the resolution of the histogram. Boundaries are located at powers of the base, where: @@ -866,8 +1224,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): scale is not restricted by the protocol, as the permissible values depend on the range of the data. """ - - zero_count: builtins.int = ... + zero_count: builtins.int """zero_count is the count of values that are either exactly zero or within the region considered zero by the instrumentation at the tolerated degree of precision. This bucket stores values that @@ -877,33 +1234,31 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): Implementations MAY consider the zero bucket to have probability mass equal to (zero_count / count). 
""" - @property def positive(self) -> global___ExponentialHistogramDataPoint.Buckets: """positive carries the positive range of exponential bucket counts.""" - pass + @property def negative(self) -> global___ExponentialHistogramDataPoint.Buckets: """negative carries the negative range of exponential bucket counts.""" - pass - flags: builtins.int = ... + flags: builtins.int """Flags that apply to this specific data point. See DataPointFlags for the available flags and their meaning. """ - @property - def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: + def exemplars( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Exemplar + ]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ - pass - min: builtins.float = ... + min: builtins.float """min is the minimum value over (start_time, end_time].""" - - max: builtins.float = ... + max: builtins.float """max is the maximum value over (start_time, end_time].""" - - zero_threshold: builtins.float = ... + zero_threshold: builtins.float """ZeroThreshold may be optionally set to convey the width of the zero region. Where the zero region is defined as the closed interval [-ZeroThreshold, ZeroThreshold]. @@ -911,39 +1266,113 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): expressed using the standard exponential formula as well as values that have been rounded to zero. 
""" - - def __init__(self, + def __init__( + self, *, - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - start_time_unix_nano : builtins.int = ..., - time_unix_nano : builtins.int = ..., - count : builtins.int = ..., - sum : builtins.float = ..., - scale : builtins.int = ..., - zero_count : builtins.int = ..., - positive : typing.Optional[global___ExponentialHistogramDataPoint.Buckets] = ..., - negative : typing.Optional[global___ExponentialHistogramDataPoint.Buckets] = ..., - flags : builtins.int = ..., - exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ..., - min : builtins.float = ..., - max : builtins.float = ..., - zero_threshold : builtins.float = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","max",b"max","min",b"min","negative",b"negative","positive",b"positive","sum",b"sum"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_max",b"_max","_min",b"_min","_sum",b"_sum","attributes",b"attributes","count",b"count","exemplars",b"exemplars","flags",b"flags","max",b"max","min",b"min","negative",b"negative","positive",b"positive","scale",b"scale","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano","zero_count",b"zero_count","zero_threshold",b"zero_threshold"]) -> None: ... 
+ attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + start_time_unix_nano: builtins.int = ..., + time_unix_nano: builtins.int = ..., + count: builtins.int = ..., + sum: builtins.float | None = ..., + scale: builtins.int = ..., + zero_count: builtins.int = ..., + positive: global___ExponentialHistogramDataPoint.Buckets | None = ..., + negative: global___ExponentialHistogramDataPoint.Buckets | None = ..., + flags: builtins.int = ..., + exemplars: collections.abc.Iterable[global___Exemplar] | None = ..., + min: builtins.float | None = ..., + max: builtins.float | None = ..., + zero_threshold: builtins.float = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "_max", + b"_max", + "_min", + b"_min", + "_sum", + b"_sum", + "max", + b"max", + "min", + b"min", + "negative", + b"negative", + "positive", + b"positive", + "sum", + b"sum", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "_max", + b"_max", + "_min", + b"_min", + "_sum", + b"_sum", + "attributes", + b"attributes", + "count", + b"count", + "exemplars", + b"exemplars", + "flags", + b"flags", + "max", + b"max", + "min", + b"min", + "negative", + b"negative", + "positive", + b"positive", + "scale", + b"scale", + "start_time_unix_nano", + b"start_time_unix_nano", + "sum", + b"sum", + "time_unix_nano", + b"time_unix_nano", + "zero_count", + b"zero_count", + "zero_threshold", + b"zero_threshold", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_max",b"_max"]) -> typing.Optional[typing_extensions.Literal["max"]]: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["_max", b"_max"] + ) -> typing_extensions.Literal["max"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_min",b"_min"]) -> typing.Optional[typing_extensions.Literal["min"]]: ... 
+ def WhichOneof( + self, oneof_group: typing_extensions.Literal["_min", b"_min"] + ) -> typing_extensions.Literal["min"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_sum",b"_sum"]) -> typing.Optional[typing_extensions.Literal["sum"]]: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["_sum", b"_sum"] + ) -> typing_extensions.Literal["sum"] | None: ... + global___ExponentialHistogramDataPoint = ExponentialHistogramDataPoint +@typing_extensions.final class SummaryDataPoint(google.protobuf.message.Message): """SummaryDataPoint is a single data point in a timeseries that describes the time-varying values of a Summary metric. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final class ValueAtQuantile(google.protobuf.message.Message): """Represents the value at a given quantile of a distribution. @@ -954,26 +1383,32 @@ class SummaryDataPoint(google.protobuf.message.Message): See the following issue for more context: https://github.com/open-telemetry/opentelemetry-proto/issues/125 """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + QUANTILE_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int - quantile: builtins.float = ... + quantile: builtins.float """The quantile of a distribution. Must be in the interval [0.0, 1.0]. """ - - value: builtins.float = ... + value: builtins.float """The value at the given quantile of a distribution. Quantile values must NOT be negative. """ - - def __init__(self, + def __init__( + self, *, - quantile : builtins.float = ..., - value : builtins.float = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["quantile",b"quantile","value",b"value"]) -> None: ... + quantile: builtins.float = ..., + value: builtins.float = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "quantile", b"quantile", "value", b"value" + ], + ) -> None: ... ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -983,32 +1418,32 @@ class SummaryDataPoint(google.protobuf.message.Message): QUANTILE_VALUES_FIELD_NUMBER: builtins.int FLAGS_FIELD_NUMBER: builtins.int @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - start_time_unix_nano: builtins.int = ... + start_time_unix_nano: builtins.int """StartTimeUnixNano is optional but strongly encouraged, see the the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """TimeUnixNano is required, see the detailed comments above Metric. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - count: builtins.int = ... + count: builtins.int """count is the number of values in the population. Must be non-negative.""" - - sum: builtins.float = ... + sum: builtins.float """sum of the values in the population. If count is zero then this field must be zero. @@ -1018,38 +1453,70 @@ class SummaryDataPoint(google.protobuf.message.Message): doing so. 
This is specifically to enforce compatibility w/ OpenMetrics, see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#summary """ - @property - def quantile_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint.ValueAtQuantile]: + def quantile_values( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___SummaryDataPoint.ValueAtQuantile + ]: """(Optional) list of values at different quantiles of the distribution calculated from the current snapshot. The quantiles must be strictly increasing. """ - pass - flags: builtins.int = ... + flags: builtins.int """Flags that apply to this specific data point. See DataPointFlags for the available flags and their meaning. """ - - def __init__(self, + def __init__( + self, *, - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - start_time_unix_nano : builtins.int = ..., - time_unix_nano : builtins.int = ..., - count : builtins.int = ..., - sum : builtins.float = ..., - quantile_values : typing.Optional[typing.Iterable[global___SummaryDataPoint.ValueAtQuantile]] = ..., - flags : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","count",b"count","flags",b"flags","quantile_values",b"quantile_values","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano"]) -> None: ... + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + start_time_unix_nano: builtins.int = ..., + time_unix_nano: builtins.int = ..., + count: builtins.int = ..., + sum: builtins.float = ..., + quantile_values: ( + collections.abc.Iterable[global___SummaryDataPoint.ValueAtQuantile] + | None + ) = ..., + flags: builtins.int = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "count", + b"count", + "flags", + b"flags", + "quantile_values", + b"quantile_values", + "start_time_unix_nano", + b"start_time_unix_nano", + "sum", + b"sum", + "time_unix_nano", + b"time_unix_nano", + ], + ) -> None: ... + global___SummaryDataPoint = SummaryDataPoint +@typing_extensions.final class Exemplar(google.protobuf.message.Message): """A representation of an exemplar, which is a sample input measurement. Exemplars also hold information about the environment when the measurement was recorded, for example the span and trace ID of the active span when the exemplar was recorded. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + FILTERED_ATTRIBUTES_FIELD_NUMBER: builtins.int TIME_UNIX_NANO_FIELD_NUMBER: builtins.int AS_DOUBLE_FIELD_NUMBER: builtins.int @@ -1057,43 +1524,75 @@ class Exemplar(google.protobuf.message.Message): SPAN_ID_FIELD_NUMBER: builtins.int TRACE_ID_FIELD_NUMBER: builtins.int @property - def filtered_attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def filtered_attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """The set of key/value pairs that were filtered out by the aggregator, but recorded alongside the original measurement. Only key/value pairs that were filtered out by the aggregator should be included """ - pass - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """time_unix_nano is the exact time when this exemplar was recorded Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. """ - - as_double: builtins.float = ... - as_int: builtins.int = ... - span_id: builtins.bytes = ... 
+ as_double: builtins.float + as_int: builtins.int + span_id: builtins.bytes """(Optional) Span ID of the exemplar trace. span_id may be missing if the measurement is not recorded inside a trace or if the trace is not sampled. """ - - trace_id: builtins.bytes = ... + trace_id: builtins.bytes """(Optional) Trace ID of the exemplar trace. trace_id may be missing if the measurement is not recorded inside a trace or if the trace is not sampled. """ - - def __init__(self, + def __init__( + self, *, - filtered_attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - time_unix_nano : builtins.int = ..., - as_double : builtins.float = ..., - as_int : builtins.int = ..., - span_id : builtins.bytes = ..., - trace_id : builtins.bytes = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","filtered_attributes",b"filtered_attributes","span_id",b"span_id","time_unix_nano",b"time_unix_nano","trace_id",b"trace_id","value",b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["as_double","as_int"]]: ... + filtered_attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + time_unix_nano: builtins.int = ..., + as_double: builtins.float = ..., + as_int: builtins.int = ..., + span_id: builtins.bytes = ..., + trace_id: builtins.bytes = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "as_double", b"as_double", "as_int", b"as_int", "value", b"value" + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "as_double", + b"as_double", + "as_int", + b"as_int", + "filtered_attributes", + b"filtered_attributes", + "span_id", + b"span_id", + "time_unix_nano", + b"time_unix_nano", + "trace_id", + b"trace_id", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["value", b"value"] + ) -> typing_extensions.Literal["as_double", "as_int"] | None: ... + global___Exemplar = Exemplar diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py index 728e9114dc2..ffc7a5d3af1 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py @@ -1,36 +1,37 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: opentelemetry/proto/resource/v1/resource.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 
\x01(\rB\x83\x01\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3') - - +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) -_RESOURCE = DESCRIPTOR.message_types_by_name['Resource'] -Resource = _reflection.GeneratedProtocolMessageType('Resource', (_message.Message,), { - 'DESCRIPTOR' : _RESOURCE, - '__module__' : 'opentelemetry.proto.resource.v1.resource_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.resource.v1.Resource) - }) -_sym_db.RegisterMessage(Resource) -if _descriptor._USE_C_DESCRIPTORS == False: +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rB\x83\x01\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3' +) - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' - _RESOURCE._serialized_start=127 - _RESOURCE._serialized_end=232 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.resource.v1.resource_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b'\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' 
+ ) + _globals["_RESOURCE"]._serialized_start = 127 + _globals["_RESOURCE"]._serialized_end = 232 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi index f660c7f2294..e7d6979bb50 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi @@ -1,38 +1,77 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message import opentelemetry.proto.common.v1.common_pb2 -import typing -import typing_extensions +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor +@typing_extensions.final class Resource(google.protobuf.message.Message): """Resource information.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ + DESCRIPTOR: google.protobuf.descriptor.Descriptor + ATTRIBUTES_FIELD_NUMBER: builtins.int DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """Set of attributes that describe the resource. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... + dropped_attributes_count: builtins.int """dropped_attributes_count is the number of dropped attributes. If the value is 0, then no attributes were dropped. """ - - def __init__(self, + def __init__( + self, *, - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count"]) -> None: ... + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + dropped_attributes_count: builtins.int = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "dropped_attributes_count", + b"dropped_attributes_count", + ], + ) -> None: ... 
+ global___Resource = Resource diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py index 5a9fec5eaa3..f573faa6ff3 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py @@ -1,114 +1,58 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: opentelemetry/proto/trace/v1/trace.proto +# Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 -from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 
\x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 
\x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 \n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3') - -_SPANFLAGS = DESCRIPTOR.enum_types_by_name['SpanFlags'] -SpanFlags = enum_type_wrapper.EnumTypeWrapper(_SPANFLAGS) -SPAN_FLAGS_DO_NOT_USE = 0 -SPAN_FLAGS_TRACE_FLAGS_MASK = 255 -SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = 256 -SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = 512 - - -_TRACESDATA = DESCRIPTOR.message_types_by_name['TracesData'] -_RESOURCESPANS = DESCRIPTOR.message_types_by_name['ResourceSpans'] -_SCOPESPANS = DESCRIPTOR.message_types_by_name['ScopeSpans'] -_SPAN = DESCRIPTOR.message_types_by_name['Span'] -_SPAN_EVENT = _SPAN.nested_types_by_name['Event'] -_SPAN_LINK = _SPAN.nested_types_by_name['Link'] -_STATUS = DESCRIPTOR.message_types_by_name['Status'] -_SPAN_SPANKIND = _SPAN.enum_types_by_name['SpanKind'] -_STATUS_STATUSCODE = _STATUS.enum_types_by_name['StatusCode'] -TracesData = _reflection.GeneratedProtocolMessageType('TracesData', (_message.Message,), { - 'DESCRIPTOR' : _TRACESDATA, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.TracesData) - }) -_sym_db.RegisterMessage(TracesData) - -ResourceSpans = _reflection.GeneratedProtocolMessageType('ResourceSpans', (_message.Message,), { - 'DESCRIPTOR' : _RESOURCESPANS, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.ResourceSpans) - }) 
-_sym_db.RegisterMessage(ResourceSpans) - -ScopeSpans = _reflection.GeneratedProtocolMessageType('ScopeSpans', (_message.Message,), { - 'DESCRIPTOR' : _SCOPESPANS, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.ScopeSpans) - }) -_sym_db.RegisterMessage(ScopeSpans) - -Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { - - 'Event' : _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_EVENT, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.Span.Event) - }) - , - - 'Link' : _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_LINK, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.Span.Link) - }) - , - 'DESCRIPTOR' : _SPAN, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.Span) - }) -_sym_db.RegisterMessage(Span) -_sym_db.RegisterMessage(Span.Event) -_sym_db.RegisterMessage(Span.Link) - -Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), { - 'DESCRIPTOR' : _STATUS, - '__module__' : 'opentelemetry.proto.trace.v1.trace_pb2' - # @@protoc_insertion_point(class_scope:opentelemetry.proto.trace.v1.Status) - }) -_sym_db.RegisterMessage(Status) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z\'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1' - _SPANFLAGS._serialized_start=1782 - _SPANFLAGS._serialized_end=1938 - _TRACESDATA._serialized_start=166 - _TRACESDATA._serialized_end=247 - _RESOURCESPANS._serialized_start=250 - _RESOURCESPANS._serialized_end=417 - 
_SCOPESPANS._serialized_start=420 - _SCOPESPANS._serialized_end=571 - _SPAN._serialized_start=574 - _SPAN._serialized_end=1602 - _SPAN_EVENT._serialized_start=1131 - _SPAN_EVENT._serialized_end=1271 - _SPAN_LINK._serialized_start=1274 - _SPAN_LINK._serialized_end=1446 - _SPAN_SPANKIND._serialized_start=1449 - _SPAN_SPANKIND._serialized_end=1602 - _STATUS._serialized_start=1605 - _STATUS._serialized_end=1779 - _STATUS_STATUSCODE._serialized_start=1695 - _STATUS_STATUSCODE._serialized_end=1773 +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 
\x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 
\n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.trace.v1.trace_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1" + ) + _globals["_SPANFLAGS"]._serialized_start = 1782 + _globals["_SPANFLAGS"]._serialized_end = 1938 + _globals["_TRACESDATA"]._serialized_start = 166 + _globals["_TRACESDATA"]._serialized_end = 247 + _globals["_RESOURCESPANS"]._serialized_start = 250 + _globals["_RESOURCESPANS"]._serialized_end = 417 + _globals["_SCOPESPANS"]._serialized_start = 420 + _globals["_SCOPESPANS"]._serialized_end = 571 + _globals["_SPAN"]._serialized_start = 574 + _globals["_SPAN"]._serialized_end = 1602 + _globals["_SPAN_EVENT"]._serialized_start = 1131 + _globals["_SPAN_EVENT"]._serialized_end = 1271 + _globals["_SPAN_LINK"]._serialized_start = 1274 + _globals["_SPAN_LINK"]._serialized_end = 1446 + _globals["_SPAN_SPANKIND"]._serialized_start = 1449 + _globals["_SPAN_SPANKIND"]._serialized_end = 1602 + _globals["_STATUS"]._serialized_start = 1605 + _globals["_STATUS"]._serialized_end = 1779 + _globals["_STATUS_STATUSCODE"]._serialized_start = 1695 + _globals["_STATUS_STATUSCODE"]._serialized_end = 1773 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi index 
b1aaf784c9c..d75e7e03181 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi @@ -1,18 +1,62 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file +Copyright 2019, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. """ + import builtins +import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.internal.enum_type_wrapper import google.protobuf.message import opentelemetry.proto.common.v1.common_pb2 import opentelemetry.proto.resource.v1.resource_pb2 +import sys import typing -import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _SpanFlags: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _SpanFlagsEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _SpanFlags.ValueType + ], + builtins.type, +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SPAN_FLAGS_DO_NOT_USE: _SpanFlags.ValueType # 0 + """The zero value for the enum. Should not be used for comparisons. + Instead use bitwise "and" with the appropriate mask as shown above. 
+ """ + SPAN_FLAGS_TRACE_FLAGS_MASK: _SpanFlags.ValueType # 255 + """Bits 0-7 are used for trace flags.""" + SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: _SpanFlags.ValueType # 256 + """Bits 8 and 9 are used to indicate that the parent span or link span is remote. + Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. + Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote. + """ + SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: _SpanFlags.ValueType # 512 class SpanFlags(_SpanFlags, metaclass=_SpanFlagsEnumTypeWrapper): """SpanFlags represents constants used to interpret the @@ -30,45 +74,22 @@ class SpanFlags(_SpanFlags, metaclass=_SpanFlagsEnumTypeWrapper): field, consequently consumers should not rely on the absence of a particular flag bit to indicate the presence of a particular feature. """ - pass -class _SpanFlags: - V = typing.NewType('V', builtins.int) -class _SpanFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SpanFlags.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - SPAN_FLAGS_DO_NOT_USE = SpanFlags.V(0) - """The zero value for the enum. Should not be used for comparisons. - Instead use bitwise "and" with the appropriate mask as shown above. - """ - SPAN_FLAGS_TRACE_FLAGS_MASK = SpanFlags.V(255) - """Bits 0-7 are used for trace flags.""" - - SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = SpanFlags.V(256) - """Bits 8 and 9 are used to indicate that the parent span or link span is remote. - Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. - Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote. - """ - - SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = SpanFlags.V(512) - -SPAN_FLAGS_DO_NOT_USE = SpanFlags.V(0) +SPAN_FLAGS_DO_NOT_USE: SpanFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. Instead use bitwise "and" with the appropriate mask as shown above. 
""" - -SPAN_FLAGS_TRACE_FLAGS_MASK = SpanFlags.V(255) +SPAN_FLAGS_TRACE_FLAGS_MASK: SpanFlags.ValueType # 255 """Bits 0-7 are used for trace flags.""" - -SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = SpanFlags.V(256) +SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: SpanFlags.ValueType # 256 """Bits 8 and 9 are used to indicate that the parent span or link span is remote. Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote. """ - -SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = SpanFlags.V(512) +SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: SpanFlags.ValueType # 512 global___SpanFlags = SpanFlags - +@typing_extensions.final class TracesData(google.protobuf.message.Message): """TracesData represents the traces data that can be stored in a persistent storage, OR can be embedded by other protocols that transfer OTLP traces data but do @@ -81,246 +102,326 @@ class TracesData(google.protobuf.message.Message): When new fields are added into this message, the OTLP request MUST be updated as well. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_SPANS_FIELD_NUMBER: builtins.int @property - def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceSpans]: + def resource_spans( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ResourceSpans + ]: """An array of ResourceSpans. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - pass - def __init__(self, + + def __init__( + self, *, - resource_spans : typing.Optional[typing.Iterable[global___ResourceSpans]] = ..., - ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal["resource_spans",b"resource_spans"]) -> None: ... + resource_spans: ( + collections.abc.Iterable[global___ResourceSpans] | None + ) = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource_spans", b"resource_spans" + ], + ) -> None: ... + global___TracesData = TracesData +@typing_extensions.final class ResourceSpans(google.protobuf.message.Message): """A collection of ScopeSpans from a Resource.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + RESOURCE_FIELD_NUMBER: builtins.int SCOPE_SPANS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource( + self, + ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the spans in this message. If this field is not set then no resource info is known. """ - pass + @property - def scope_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeSpans]: + def scope_spans( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___ScopeSpans + ]: """A list of ScopeSpans that originate from a resource.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_spans" field which have their own schema_url field. 
""" - - def __init__(self, + def __init__( + self, *, - resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ..., - scope_spans : typing.Optional[typing.Iterable[global___ScopeSpans]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["resource",b"resource","schema_url",b"schema_url","scope_spans",b"scope_spans"]) -> None: ... + resource: ( + opentelemetry.proto.resource.v1.resource_pb2.Resource | None + ) = ..., + scope_spans: ( + collections.abc.Iterable[global___ScopeSpans] | None + ) = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["resource", b"resource"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "resource", + b"resource", + "schema_url", + b"schema_url", + "scope_spans", + b"scope_spans", + ], + ) -> None: ... + global___ResourceSpans = ResourceSpans +@typing_extensions.final class ScopeSpans(google.protobuf.message.Message): """A collection of Spans produced by an InstrumentationScope.""" - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + SCOPE_FIELD_NUMBER: builtins.int SPANS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope( + self, + ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the spans in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). 
""" - pass + @property - def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]: + def spans( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Span + ]: """A list of Spans that originate from an instrumentation scope.""" - pass - schema_url: typing.Text = ... + schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the span data is recorded in. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all spans and span events in the "spans" field. """ - - def __init__(self, + def __init__( + self, *, - scope : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationScope] = ..., - spans : typing.Optional[typing.Iterable[global___Span]] = ..., - schema_url : typing.Text = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["scope",b"scope"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["schema_url",b"schema_url","scope",b"scope","spans",b"spans"]) -> None: ... + scope: ( + opentelemetry.proto.common.v1.common_pb2.InstrumentationScope + | None + ) = ..., + spans: collections.abc.Iterable[global___Span] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["scope", b"scope"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "schema_url", b"schema_url", "scope", b"scope", "spans", b"spans" + ], + ) -> None: ... + global___ScopeSpans = ScopeSpans +@typing_extensions.final class Span(google.protobuf.message.Message): """A Span represents a single operation performed by a single component of the system. The next available field id is 17. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
- class SpanKind(_SpanKind, metaclass=_SpanKindEnumTypeWrapper): - """SpanKind is the type of span. Can be used to specify additional relationships between spans - in addition to a parent/child relationship. - """ - pass + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _SpanKind: - V = typing.NewType('V', builtins.int) - class _SpanKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SpanKind.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - SPAN_KIND_UNSPECIFIED = Span.SpanKind.V(0) + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _SpanKindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Span._SpanKind.ValueType + ], + builtins.type, + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SPAN_KIND_UNSPECIFIED: Span._SpanKind.ValueType # 0 """Unspecified. Do NOT use as default. Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED. """ - - SPAN_KIND_INTERNAL = Span.SpanKind.V(1) + SPAN_KIND_INTERNAL: Span._SpanKind.ValueType # 1 """Indicates that the span represents an internal operation within an application, as opposed to an operation happening at the boundaries. Default value. """ - - SPAN_KIND_SERVER = Span.SpanKind.V(2) + SPAN_KIND_SERVER: Span._SpanKind.ValueType # 2 """Indicates that the span covers server-side handling of an RPC or other remote network request. """ - - SPAN_KIND_CLIENT = Span.SpanKind.V(3) + SPAN_KIND_CLIENT: Span._SpanKind.ValueType # 3 """Indicates that the span describes a request to some remote service.""" - - SPAN_KIND_PRODUCER = Span.SpanKind.V(4) + SPAN_KIND_PRODUCER: Span._SpanKind.ValueType # 4 """Indicates that the span describes a producer sending a message to a broker. Unlike CLIENT and SERVER, there is often no direct critical path latency relationship between producer and consumer spans. 
A PRODUCER span ends when the message was accepted by the broker while the logical processing of the message might span a much longer time. """ - - SPAN_KIND_CONSUMER = Span.SpanKind.V(5) + SPAN_KIND_CONSUMER: Span._SpanKind.ValueType # 5 """Indicates that the span describes consumer receiving a message from a broker. Like the PRODUCER kind, there is often no direct critical path latency relationship between producer and consumer spans. """ + class SpanKind(_SpanKind, metaclass=_SpanKindEnumTypeWrapper): + """SpanKind is the type of span. Can be used to specify additional relationships between spans + in addition to a parent/child relationship. + """ - SPAN_KIND_UNSPECIFIED = Span.SpanKind.V(0) + SPAN_KIND_UNSPECIFIED: Span.SpanKind.ValueType # 0 """Unspecified. Do NOT use as default. Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED. """ - - SPAN_KIND_INTERNAL = Span.SpanKind.V(1) + SPAN_KIND_INTERNAL: Span.SpanKind.ValueType # 1 """Indicates that the span represents an internal operation within an application, as opposed to an operation happening at the boundaries. Default value. """ - - SPAN_KIND_SERVER = Span.SpanKind.V(2) + SPAN_KIND_SERVER: Span.SpanKind.ValueType # 2 """Indicates that the span covers server-side handling of an RPC or other remote network request. """ - - SPAN_KIND_CLIENT = Span.SpanKind.V(3) + SPAN_KIND_CLIENT: Span.SpanKind.ValueType # 3 """Indicates that the span describes a request to some remote service.""" - - SPAN_KIND_PRODUCER = Span.SpanKind.V(4) + SPAN_KIND_PRODUCER: Span.SpanKind.ValueType # 4 """Indicates that the span describes a producer sending a message to a broker. Unlike CLIENT and SERVER, there is often no direct critical path latency relationship between producer and consumer spans. A PRODUCER span ends when the message was accepted by the broker while the logical processing of the message might span a much longer time. 
""" - - SPAN_KIND_CONSUMER = Span.SpanKind.V(5) + SPAN_KIND_CONSUMER: Span.SpanKind.ValueType # 5 """Indicates that the span describes consumer receiving a message from a broker. Like the PRODUCER kind, there is often no direct critical path latency relationship between producer and consumer spans. """ - + @typing_extensions.final class Event(google.protobuf.message.Message): """Event is a time-stamped annotation of the span, consisting of user-supplied text description and key-value pairs. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + TIME_UNIX_NANO_FIELD_NUMBER: builtins.int NAME_FIELD_NUMBER: builtins.int ATTRIBUTES_FIELD_NUMBER: builtins.int DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int - time_unix_nano: builtins.int = ... + time_unix_nano: builtins.int """time_unix_nano is the time the event occurred.""" - - name: typing.Text = ... + name: builtins.str """name of the event. This field is semantically required to be set to non-empty string. """ - @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """attributes is a collection of attribute key/value pairs on the event. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... + dropped_attributes_count: builtins.int """dropped_attributes_count is the number of dropped attributes. If the value is 0, then no attributes were dropped. 
""" - - def __init__(self, + def __init__( + self, *, - time_unix_nano : builtins.int = ..., - name : typing.Text = ..., - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","name",b"name","time_unix_nano",b"time_unix_nano"]) -> None: ... + time_unix_nano: builtins.int = ..., + name: builtins.str = ..., + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + dropped_attributes_count: builtins.int = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "dropped_attributes_count", + b"dropped_attributes_count", + "name", + b"name", + "time_unix_nano", + b"time_unix_nano", + ], + ) -> None: ... + @typing_extensions.final class Link(google.protobuf.message.Message): """A pointer from the current span to another span in the same trace or in a different trace. For example, this can be used in batching operations, where a single batch handler processes multiple requests from different traces or when the handler receives a request from a different project. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + TRACE_ID_FIELD_NUMBER: builtins.int SPAN_ID_FIELD_NUMBER: builtins.int TRACE_STATE_FIELD_NUMBER: builtins.int ATTRIBUTES_FIELD_NUMBER: builtins.int DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int FLAGS_FIELD_NUMBER: builtins.int - trace_id: builtins.bytes = ... + trace_id: builtins.bytes """A unique identifier of a trace that this linked span is part of. The ID is a 16-byte array. """ - - span_id: builtins.bytes = ... + span_id: builtins.bytes """A unique identifier for the linked span. 
The ID is an 8-byte array.""" - - trace_state: typing.Text = ... + trace_state: builtins.str """The trace_state associated with the link.""" - @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """attributes is a collection of attribute key/value pairs on the link. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... + dropped_attributes_count: builtins.int """dropped_attributes_count is the number of dropped attributes. If the value is 0, then no attributes were dropped. """ - - flags: builtins.int = ... + flags: builtins.int """Flags, a bit field. Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace @@ -339,17 +440,38 @@ class Span(google.protobuf.message.Message): [Optional]. """ - - def __init__(self, + def __init__( + self, *, - trace_id : builtins.bytes = ..., - span_id : builtins.bytes = ..., - trace_state : typing.Text = ..., - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - flags : builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","flags",b"flags","span_id",b"span_id","trace_id",b"trace_id","trace_state",b"trace_state"]) -> None: ... 
+ trace_id: builtins.bytes = ..., + span_id: builtins.bytes = ..., + trace_state: builtins.str = ..., + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + dropped_attributes_count: builtins.int = ..., + flags: builtins.int = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "dropped_attributes_count", + b"dropped_attributes_count", + "flags", + b"flags", + "span_id", + b"span_id", + "trace_id", + b"trace_id", + "trace_state", + b"trace_state", + ], + ) -> None: ... TRACE_ID_FIELD_NUMBER: builtins.int SPAN_ID_FIELD_NUMBER: builtins.int @@ -367,7 +489,7 @@ class Span(google.protobuf.message.Message): LINKS_FIELD_NUMBER: builtins.int DROPPED_LINKS_COUNT_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - trace_id: builtins.bytes = ... + trace_id: builtins.bytes """A unique identifier for a trace. All spans from the same trace share the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes OR of length other than 16 bytes is considered invalid (empty string in OTLP/JSON @@ -375,8 +497,7 @@ class Span(google.protobuf.message.Message): This field is required. """ - - span_id: builtins.bytes = ... + span_id: builtins.bytes """A unique identifier for a span within a trace, assigned when the span is created. The ID is an 8-byte array. An ID with all zeroes OR of length other than 8 bytes is considered invalid (empty string in OTLP/JSON @@ -384,19 +505,16 @@ class Span(google.protobuf.message.Message): This field is required. """ - - trace_state: typing.Text = ... + trace_state: builtins.str """trace_state conveys information about request position in multiple distributed tracing graphs. It is a trace_state in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header See also https://github.com/w3c/distributed-tracing for more details about this field. 
""" - - parent_span_id: builtins.bytes = ... + parent_span_id: builtins.bytes """The `span_id` of this span's parent span. If this is a root span, then this field must be empty. The ID is an 8-byte array. """ - - flags: builtins.int = ... + flags: builtins.int """Flags, a bit field. Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace @@ -419,8 +537,7 @@ class Span(google.protobuf.message.Message): [Optional]. """ - - name: typing.Text = ... + name: builtins.str """A description of the span's operation. For example, the name can be a qualified method name or a file name @@ -433,14 +550,12 @@ class Span(google.protobuf.message.Message): This field is required. """ - - kind: global___Span.SpanKind.V = ... + kind: global___Span.SpanKind.ValueType """Distinguishes between spans generated in a particular context. For example, two spans with the same name may be distinguished using `CLIENT` (caller) and `SERVER` (callee) to identify queueing latency associated with the span. """ - - start_time_unix_nano: builtins.int = ... + start_time_unix_nano: builtins.int """start_time_unix_nano is the start time of the span. On the client side, this is the time kept by the local machine where the span execution starts. On the server side, this is the time when the server's application handler starts running. @@ -448,8 +563,7 @@ class Span(google.protobuf.message.Message): This field is semantically required and it is expected that end_time >= start_time. """ - - end_time_unix_nano: builtins.int = ... + end_time_unix_nano: builtins.int """end_time_unix_nano is the end time of the span. On the client side, this is the time kept by the local machine where the span execution ends. On the server side, this is the time when the server application handler stops running. @@ -457,9 +571,12 @@ class Span(google.protobuf.message.Message): This field is semantically required and it is expected that end_time >= start_time. 
""" - @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ]: """attributes is a collection of key/value pairs. Note, global attributes like server name can be set using the resource API. Examples of attributes: @@ -473,112 +590,168 @@ class Span(google.protobuf.message.Message): Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - pass - dropped_attributes_count: builtins.int = ... + dropped_attributes_count: builtins.int """dropped_attributes_count is the number of attributes that were discarded. Attributes can be discarded because their keys are too long or because there are too many attributes. If this value is 0, then no attributes were dropped. """ - @property - def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Event]: + def events( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Span.Event + ]: """events is a collection of Event items.""" - pass - dropped_events_count: builtins.int = ... + dropped_events_count: builtins.int """dropped_events_count is the number of dropped events. If the value is 0, then no events were dropped. """ - @property - def links(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Link]: + def links( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Span.Link + ]: """links is a collection of Links, which are references from this span to a span in the same or different trace. """ - pass - dropped_links_count: builtins.int = ... 
+ dropped_links_count: builtins.int """dropped_links_count is the number of dropped links after the maximum size was enforced. If this value is 0, then no links were dropped. """ - @property def status(self) -> global___Status: """An optional final status for this span. Semantically when Status isn't set, it means span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0). """ - pass - def __init__(self, + + def __init__( + self, *, - trace_id : builtins.bytes = ..., - span_id : builtins.bytes = ..., - trace_state : typing.Text = ..., - parent_span_id : builtins.bytes = ..., - flags : builtins.int = ..., - name : typing.Text = ..., - kind : global___Span.SpanKind.V = ..., - start_time_unix_nano : builtins.int = ..., - end_time_unix_nano : builtins.int = ..., - attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ..., - dropped_attributes_count : builtins.int = ..., - events : typing.Optional[typing.Iterable[global___Span.Event]] = ..., - dropped_events_count : builtins.int = ..., - links : typing.Optional[typing.Iterable[global___Span.Link]] = ..., - dropped_links_count : builtins.int = ..., - status : typing.Optional[global___Status] = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["status",b"status"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","dropped_events_count",b"dropped_events_count","dropped_links_count",b"dropped_links_count","end_time_unix_nano",b"end_time_unix_nano","events",b"events","flags",b"flags","kind",b"kind","links",b"links","name",b"name","parent_span_id",b"parent_span_id","span_id",b"span_id","start_time_unix_nano",b"start_time_unix_nano","status",b"status","trace_id",b"trace_id","trace_state",b"trace_state"]) -> None: ... 
+ trace_id: builtins.bytes = ..., + span_id: builtins.bytes = ..., + trace_state: builtins.str = ..., + parent_span_id: builtins.bytes = ..., + flags: builtins.int = ..., + name: builtins.str = ..., + kind: global___Span.SpanKind.ValueType = ..., + start_time_unix_nano: builtins.int = ..., + end_time_unix_nano: builtins.int = ..., + attributes: ( + collections.abc.Iterable[ + opentelemetry.proto.common.v1.common_pb2.KeyValue + ] + | None + ) = ..., + dropped_attributes_count: builtins.int = ..., + events: collections.abc.Iterable[global___Span.Event] | None = ..., + dropped_events_count: builtins.int = ..., + links: collections.abc.Iterable[global___Span.Link] | None = ..., + dropped_links_count: builtins.int = ..., + status: global___Status | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["status", b"status"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "attributes", + b"attributes", + "dropped_attributes_count", + b"dropped_attributes_count", + "dropped_events_count", + b"dropped_events_count", + "dropped_links_count", + b"dropped_links_count", + "end_time_unix_nano", + b"end_time_unix_nano", + "events", + b"events", + "flags", + b"flags", + "kind", + b"kind", + "links", + b"links", + "name", + b"name", + "parent_span_id", + b"parent_span_id", + "span_id", + b"span_id", + "start_time_unix_nano", + b"start_time_unix_nano", + "status", + b"status", + "trace_id", + b"trace_id", + "trace_state", + b"trace_state", + ], + ) -> None: ... + global___Span = Span +@typing_extensions.final class Status(google.protobuf.message.Message): """The Status type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. """ - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
- class StatusCode(_StatusCode, metaclass=_StatusCodeEnumTypeWrapper): - """For the semantics of status codes see - https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status - """ - pass + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _StatusCode: - V = typing.NewType('V', builtins.int) - class _StatusCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_StatusCode.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - STATUS_CODE_UNSET = Status.StatusCode.V(0) + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _StatusCodeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Status._StatusCode.ValueType + ], + builtins.type, + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STATUS_CODE_UNSET: Status._StatusCode.ValueType # 0 """The default status.""" - - STATUS_CODE_OK = Status.StatusCode.V(1) + STATUS_CODE_OK: Status._StatusCode.ValueType # 1 """The Span has been validated by an Application developer or Operator to have completed successfully. """ - - STATUS_CODE_ERROR = Status.StatusCode.V(2) + STATUS_CODE_ERROR: Status._StatusCode.ValueType # 2 """The Span contains an error.""" + class StatusCode(_StatusCode, metaclass=_StatusCodeEnumTypeWrapper): + """For the semantics of status codes see + https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status + """ - STATUS_CODE_UNSET = Status.StatusCode.V(0) + STATUS_CODE_UNSET: Status.StatusCode.ValueType # 0 """The default status.""" - - STATUS_CODE_OK = Status.StatusCode.V(1) + STATUS_CODE_OK: Status.StatusCode.ValueType # 1 """The Span has been validated by an Application developer or Operator to have completed successfully. 
""" - - STATUS_CODE_ERROR = Status.StatusCode.V(2) + STATUS_CODE_ERROR: Status.StatusCode.ValueType # 2 """The Span contains an error.""" - MESSAGE_FIELD_NUMBER: builtins.int CODE_FIELD_NUMBER: builtins.int - message: typing.Text = ... + message: builtins.str """A developer-facing human readable error message.""" - - code: global___Status.StatusCode.V = ... + code: global___Status.StatusCode.ValueType """The status code.""" - - def __init__(self, + def __init__( + self, *, - message : typing.Text = ..., - code : global___Status.StatusCode.V = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["code",b"code","message",b"message"]) -> None: ... + message: builtins.str = ..., + code: global___Status.StatusCode.ValueType = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "code", b"code", "message", b"message" + ], + ) -> None: ... + global___Status = Status diff --git a/opentelemetry-proto/test-requirements-1.txt b/opentelemetry-proto/test-requirements-1.txt deleted file mode 100644 index b598b4f172a..00000000000 --- a/opentelemetry-proto/test-requirements-1.txt +++ /dev/null @@ -1,14 +0,0 @@ -asgiref==3.7.2 -Deprecated==1.2.14 -importlib-metadata==6.11.0 -iniconfig==2.0.0 -packaging==24.0 -pluggy==1.5.0 -protobuf==4.25.3 -py-cpuinfo==9.0.0 -pytest==7.4.4 -tomli==2.0.1 -typing_extensions==4.10.0 -wrapt==1.16.0 -zipp==3.19.2 --e opentelemetry-proto diff --git a/opentelemetry-proto/test-requirements-0.txt b/opentelemetry-proto/test-requirements.txt similarity index 93% rename from opentelemetry-proto/test-requirements-0.txt rename to opentelemetry-proto/test-requirements.txt index 671756980ce..e1e58e1a9ad 100644 --- a/opentelemetry-proto/test-requirements-0.txt +++ b/opentelemetry-proto/test-requirements.txt @@ -4,7 +4,7 @@ importlib-metadata==6.11.0 iniconfig==2.0.0 packaging==24.0 pluggy==1.5.0 -protobuf==3.20.3 +protobuf==5.26.1 py-cpuinfo==9.0.0 pytest==7.4.4 tomli==2.0.1 diff --git a/tox.ini 
b/tox.ini index 973158af1f1..69d9a08b472 100644 --- a/tox.ini +++ b/tox.ini @@ -10,13 +10,9 @@ envlist = pypy3-test-opentelemetry-api lint-opentelemetry-api - ; The numbers at the end of the environment names - ; below mean these dependencies are being used: - ; 0: protobuf==3.20.3 - ; 1: protobuf==4.25.3 - py3{8,9,10,11,12}-test-opentelemetry-proto-{0,1} - pypy3-test-opentelemetry-proto-{0,1} - lint-opentelemetry-proto + py3{8,9,10,11,12}-test-opentelemetry-proto-protobuf5 + pypy3-test-opentelemetry-proto-protobuf5 + lint-opentelemetry-proto-protobuf5 py3{8,9,10,11,12}-test-opentelemetry-sdk pypy3-test-opentelemetry-sdk @@ -42,12 +38,8 @@ envlist = ; exporter-opencensus intentionally excluded from pypy3 lint-opentelemetry-exporter-opencensus - ; The numbers at the end of the environment names - ; below mean these dependencies are being used: - ; 0: protobuf==3.20.3 - ; 1: protobuf==4.25.3 - py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-common-{0,1} - pypy3-test-opentelemetry-exporter-otlp-proto-common-{0,1} + py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-common + pypy3-test-opentelemetry-exporter-otlp-proto-common lint-opentelemetry-exporter-otlp-proto-common ; opentelemetry-exporter-otlp @@ -55,21 +47,13 @@ envlist = ; intentionally excluded from pypy3 lint-opentelemetry-exporter-otlp-combined - ; The numbers at the end of the environment names - ; below mean these dependencies are being used: - ; 0: protobuf==3.20.3 - ; 1: protobuf==4.25.3 - py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-grpc-{0,1} + py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-grpc ; intentionally excluded from pypy3 lint-opentelemetry-exporter-otlp-proto-grpc benchmark-opentelemetry-exporter-otlp-proto-grpc - ; The numbers at the end of the environment names - ; below mean these dependencies are being used: - ; 0: protobuf==3.20.3 - ; 1: protobuf==4.25.3 - py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-http-{0,1} - 
pypy3-test-opentelemetry-exporter-otlp-proto-http-{0,1} + py3{8,9,10,11,12}-test-opentelemetry-exporter-otlp-proto-http + pypy3-test-opentelemetry-exporter-otlp-proto-http lint-opentelemetry-exporter-otlp-proto-http py3{8,9,10,11,12}-test-opentelemetry-exporter-prometheus @@ -107,7 +91,7 @@ envlist = mypy,mypyinstalled pyright docs - docker-tests-proto{3,4} + docker-tests-{otlpexporter,opencensus} public-symbols-check shellcheck generate-workflows @@ -118,10 +102,6 @@ deps = coverage: pytest coverage: pytest-cov - ; proto 3 and 4 tests install the respective version of protobuf - proto3: protobuf~=3.19.0 - proto4: protobuf~=4.0 - allowlist_externals = sh setenv = @@ -144,9 +124,7 @@ commands_pre = test-utils: pip install -r {toxinidir}/tests/opentelemetry-test-utils/test-requirements.txt - opentelemetry-proto-0: pip install -r {toxinidir}/opentelemetry-proto/test-requirements-0.txt - opentelemetry-proto-1: pip install -r {toxinidir}/opentelemetry-proto/test-requirements-1.txt - lint-opentelemetry-proto: pip install -r {toxinidir}/opentelemetry-proto/test-requirements-1.txt + opentelemetry-proto-protobuf5: pip install -r {toxinidir}/opentelemetry-proto/test-requirements.txt getting-started: pip install -r {toxinidir}/docs/getting_started/tests/requirements.txt getting-started: pip install -e {env:CONTRIB_REPO}\#egg=opentelemetry-util-http&subdirectory=util/opentelemetry-util-http @@ -157,20 +135,14 @@ commands_pre = exporter-opencensus: pip install -r {toxinidir}/exporter/opentelemetry-exporter-opencensus/test-requirements.txt - exporter-otlp-proto-common-0: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-0.txt - exporter-otlp-proto-common-1: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt - lint-opentelemetry-exporter-otlp-proto-common: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements-1.txt + exporter-otlp-proto-common: 
pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt exporter-otlp-combined: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp/test-requirements.txt - opentelemetry-exporter-otlp-proto-grpc-0: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-0.txt - opentelemetry-exporter-otlp-proto-grpc-1: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt - lint-opentelemetry-exporter-otlp-proto-grpc: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements-1.txt + opentelemetry-exporter-otlp-proto-grpc: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt benchmark-exporter-otlp-proto-grpc: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/benchmark-requirements.txt - opentelemetry-exporter-otlp-proto-http-0: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-0.txt - opentelemetry-exporter-otlp-proto-http-1: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt - lint-opentelemetry-exporter-otlp-proto-http: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements-1.txt + opentelemetry-exporter-otlp-proto-http: pip install -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http/test-requirements.txt opentracing-shim: pip install -r {toxinidir}/shim/opentelemetry-opentracing-shim/test-requirements.txt @@ -211,11 +183,11 @@ commands = lint-opentelemetry-sdk: pylint {toxinidir}/opentelemetry-sdk benchmark-opentelemetry-sdk: pytest {toxinidir}/opentelemetry-sdk/benchmarks --benchmark-json={toxinidir}/opentelemetry-sdk/sdk-benchmark.json {posargs} - test-opentelemetry-proto: pytest {toxinidir}/opentelemetry-proto/tests {posargs} - lint-opentelemetry-proto: black --diff --check --config 
{toxinidir}/pyproject.toml {toxinidir}/opentelemetry-proto - lint-opentelemetry-proto: isort --diff --check-only --settings-path {toxinidir}/.isort.cfg {toxinidir}/opentelemetry-proto - lint-opentelemetry-proto: flake8 --config {toxinidir}/.flake8 {toxinidir}/opentelemetry-proto - lint-opentelemetry-proto: pylint {toxinidir}/opentelemetry-proto + test-opentelemetry-proto-protobuf5: pytest {toxinidir}/opentelemetry-proto/tests {posargs} + lint-opentelemetry-proto-protobuf5: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/opentelemetry-proto + lint-opentelemetry-proto-protobuf5: isort --diff --check-only --settings-path {toxinidir}/.isort.cfg {toxinidir}/opentelemetry-proto + lint-opentelemetry-proto-protobuf5: flake8 --config {toxinidir}/.flake8 {toxinidir}/opentelemetry-proto + lint-opentelemetry-proto-protobuf5: pylint {toxinidir}/opentelemetry-proto test-opentelemetry-semantic-conventions: pytest {toxinidir}/opentelemetry-semantic-conventions/tests {posargs} lint-opentelemetry-semantic-conventions: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/opentelemetry-semantic-conventions @@ -251,7 +223,7 @@ commands = lint-opentelemetry-exporter-otlp-proto-common: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common lint-opentelemetry-exporter-otlp-proto-common: isort --diff --check-only --settings-path {toxinidir}/.isort.cfg {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common lint-opentelemetry-exporter-otlp-proto-common: flake8 --config {toxinidir}/.flake8 {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common - lint-opentelemetry-exporter-otlp-proto-common: sh -c "cd exporter && pylint --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common" + lint-opentelemetry-exporter-otlp-proto-common: sh -c "cd exporter && pylint --prefer-stubs yes --rcfile ../.pylintrc 
{toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common" test-opentelemetry-exporter-otlp-combined: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp/tests {posargs} lint-opentelemetry-exporter-otlp-combined: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/exporter/opentelemetry-exporter-otlp @@ -263,14 +235,14 @@ commands = lint-opentelemetry-exporter-otlp-proto-grpc: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc lint-opentelemetry-exporter-otlp-proto-grpc: isort --diff --check-only --settings-path {toxinidir}/.isort.cfg {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc lint-opentelemetry-exporter-otlp-proto-grpc: flake8 --config {toxinidir}/.flake8 {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc - lint-opentelemetry-exporter-otlp-proto-grpc: sh -c "cd exporter && pylint --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc" + lint-opentelemetry-exporter-otlp-proto-grpc: sh -c "cd exporter && pylint --prefer-stubs yes --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc" benchmark-opentelemetry-exporter-otlp-proto-grpc: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/benchmarks --benchmark-json=exporter-otlp-proto-grpc-benchmark.json {posargs} test-opentelemetry-exporter-otlp-proto-http: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http/tests {posargs} lint-opentelemetry-exporter-otlp-proto-http: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http lint-opentelemetry-exporter-otlp-proto-http: isort --diff --check-only --settings-path {toxinidir}/.isort.cfg {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http lint-opentelemetry-exporter-otlp-proto-http: flake8 --config {toxinidir}/.flake8 {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http - 
lint-opentelemetry-exporter-otlp-proto-http: sh -c "cd exporter && pylint --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http" + lint-opentelemetry-exporter-otlp-proto-http: sh -c "cd exporter && pylint --prefer-stubs yes --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http" test-opentelemetry-exporter-prometheus: pytest {toxinidir}/exporter/opentelemetry-exporter-prometheus/tests {posargs} lint-opentelemetry-exporter-prometheus: black --diff --check --config {toxinidir}/pyproject.toml {toxinidir}/exporter/opentelemetry-exporter-prometheus @@ -355,6 +327,11 @@ recreate = True deps = -c {toxinidir}/dev-requirements.txt -r {toxinidir}/docs-requirements.txt +setenv = + ; We need this workaround to allow generating docs for exporters that have different protobuf versions as a requirement. + ; See https://github.com/open-telemetry/opentelemetry-python/pull/4206 + ; We can remove the workaround when opentelemetry-exporter-zipkin-proto-http supports protobuf > 5.26 + PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python changedir = docs commands = sphinx-build -E -a -W -b html -T .
_build/html @@ -384,7 +361,7 @@ commands_pre = commands = {toxinidir}/scripts/tracecontext-integration-test.sh -[testenv:docker-tests-proto{3,4}] +[testenv:docker-tests-{otlpexporter,opencensus}] deps = pytest==7.1.3 # Pinning PyYAML for issue: https://github.com/yaml/pyyaml/issues/724 @@ -394,10 +371,6 @@ deps = docker-compose==1.29.2 requests==2.28.2 - ; proto 3 and 4 tests install the respective version of protobuf - proto3: protobuf~=3.19.0 - proto4: protobuf~=4.0 - changedir = tests/opentelemetry-docker-tests/tests @@ -406,19 +379,20 @@ commands_pre = pip install -e {toxinidir}/opentelemetry-api \ -e {toxinidir}/opentelemetry-semantic-conventions \ -e {toxinidir}/opentelemetry-sdk \ - -e {toxinidir}/tests/opentelemetry-test-utils \ - ; opencensus exporter does not work with protobuf 4 - proto3: -e {toxinidir}/exporter/opentelemetry-exporter-opencensus \ - -e {toxinidir}/opentelemetry-proto \ - -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common \ - -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc \ - -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http \ - -e {toxinidir}/exporter/opentelemetry-exporter-otlp + -e {toxinidir}/tests/opentelemetry-test-utils + + otlpexporter: pip install -e {toxinidir}/opentelemetry-proto + otlpexporter: pip install -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common + otlpexporter: pip install -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc + otlpexporter: pip install -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http + otlpexporter: pip install -e {toxinidir}/exporter/opentelemetry-exporter-otlp + + opencensus: pip install -e {toxinidir}/exporter/opentelemetry-exporter-opencensus + docker-compose up -d commands = - proto3: pytest {posargs} - ; opencensus exporter does not work with protobuf 4 - proto4: pytest --ignore opencensus {posargs} + otlpexporter: pytest otlpexporter {posargs} + opencensus: pytest opencensus {posargs} commands_post = 
docker-compose down -v