Skip to content

Commit

Permalink
lint
Browse files Browse the repository at this point in the history
  • Loading branch information
xuan-cao-swi committed Sep 27, 2024
1 parent d3378f2 commit 3eb733b
Show file tree
Hide file tree
Showing 9 changed files with 167 additions and 56 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,13 @@
require_relative 'exponential_histogram/logarithm_mapping'
require_relative 'exponential_histogram/exponent_mapping'


module OpenTelemetry
module SDK
module Metrics
module Aggregation
# Contains the implementation of the ExponentialBucketHistogram aggregation
# https://opentelemetry.io/docs/specs/otel/metrics/data-model/#exponentialhistogram
class ExponentialBucketHistogram
class ExponentialBucketHistogram # rubocop:disable Metrics/ClassLength
attr_reader :aggregation_temporality

# relate to min max scale: https://opentelemetry.io/docs/specs/otel/metrics/sdk/#support-a-minimum-and-maximum-scale
Expand Down Expand Up @@ -71,11 +70,10 @@ def collect(start_time, end_time)
end
end

# NOTE: this implementation records only the local (per-attribute data point) min and max
# rubocop:disable Metrics/MethodLength, Metrics/CyclomaticComplexity
def update(amount, attributes)
# fetch or initialize the ExponentialHistogramDataPoint
hdp = @data_points.fetch(attributes) do

if @record_min_max
min = Float::INFINITY
max = -Float::INFINITY
Expand All @@ -95,7 +93,7 @@ def update(amount, attributes)
nil, # :exemplars
min, # :min
max, # :max
@zero_threshold, # :zero_threshold)
@zero_threshold # :zero_threshold)
)
end

Expand Down Expand Up @@ -123,7 +121,7 @@ def update(amount, attributes)
is_rescaling_needed = false
low = high = 0

if buckets.counts.empty?
if buckets.counts == [0] # special case of empty
buckets.index_start = bucket_index
buckets.index_end = bucket_index
buckets.index_base = bucket_index
Expand Down Expand Up @@ -171,12 +169,13 @@ def update(amount, attributes)
buckets.index_end = bucket_index
end

bucket_index = bucket_index - buckets.index_base
bucket_index -= buckets.index_base
bucket_index += buckets.counts.size if bucket_index.negative?

buckets.increment_bucket(bucket_index)
nil
end
# rubocop:enable Metrics/MethodLength, Metrics/CyclomaticComplexity

private

Expand All @@ -203,7 +202,7 @@ def get_scale_change(low, high)

def downscale(change, positive, negative)
return if change == 0
raise "Invalid change of scale" if change.negative?
raise 'Invalid change of scale' if change.negative?

positive.downscale(change)
negative.downscale(change)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@ module SDK
module Metrics
module Aggregation
module ExponentialHistogram
# Buckets is the fundamental building block of exponential histogram that store bucket/boundary value
class Buckets
attr_accessor :index_start, :index_end, :index_base
attr_reader :counts

def initialize
@counts = [0]
Expand All @@ -30,7 +30,7 @@ def grow(needed, max_size)
new_positive_limit = new_size - bias

tmp = Array.new(new_size, 0)
tmp[new_positive_limit..-1] = @counts[old_positive_limit..-1]
tmp[new_positive_limit..-1] = @counts[old_positive_limit..]
tmp[0...old_positive_limit] = @counts[0...old_positive_limit]
@counts = tmp
end
Expand All @@ -39,11 +39,11 @@ def offset
@index_start
end

def get_offset_counts
def offset_counts
bias = @index_base - @index_start
@counts[-bias..-1] + @counts[0...-bias]
@counts[-bias..] + @counts[0...-bias]
end
alias_method :counts, :get_offset_counts
alias counts offset_counts

def length
return 0 if @counts.empty?
Expand All @@ -67,7 +67,7 @@ def downscale(amount)
if bias != 0
@index_base = @index_start
@counts.reverse!
@counts = @counts[0...bias].reverse + @counts[bias..-1].reverse
@counts = @counts[0...bias].reverse + @counts[bias..].reverse
end

size = 1 + @index_end - @index_start
Expand Down Expand Up @@ -110,4 +110,3 @@ def increment_bucket(bucket_index, increment = 1)
end
end
end

Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ module SDK
module Metrics
module Aggregation
module ExponentialHistogram
# ExponentMapping for mapping when scale <= 0
class ExponentMapping
attr_reader :scale

Expand All @@ -19,7 +20,6 @@ def initialize(scale)
end

def map_to_index(value)

return @min_normal_lower_boundary_index if value < IEEE754::MIN_NORMAL_VALUE

exponent = IEEE754.get_ieee_754_exponent(value)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ module SDK
module Metrics
module Aggregation
module ExponentialHistogram
# IEEE754 standard for floating-point calculation
module IEEE754
extend Fiddle::Importer
dlload Fiddle::Handle::DEFAULT
Expand All @@ -19,7 +20,7 @@ module IEEE754
EXPONENT_WIDTH = 11

MANTISSA_MASK = (1 << MANTISSA_WIDTH) - 1
EXPONENT_BIAS = (2 ** (EXPONENT_WIDTH - 1)) - 1
EXPONENT_BIAS = (2**(EXPONENT_WIDTH - 1)) - 1
EXPONENT_MASK = ((1 << EXPONENT_WIDTH) - 1) << MANTISSA_WIDTH
SIGN_MASK = 1 << (EXPONENT_WIDTH + MANTISSA_WIDTH)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ module SDK
module Metrics
module Aggregation
module ExponentialHistogram
# Log2eScaleFactor is precomputed scale factor value
class Log2eScaleFactor
MAX_SCALE = 20

Expand All @@ -25,4 +26,4 @@ def self.log2e_scale_buckets
end
end
end
end
end
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,13 @@ module SDK
module Metrics
module Aggregation
module ExponentialHistogram
# LogarithmMapping for mapping when scale > 0
class LogarithmMapping
attr_reader :scale

def initialize(scale)
@scale = scale
@scale_factor = Log2eScaleFactor::LOG2E_SCALE_BUCKETS[scale] # scale_factor is used for mapping the index
@scale_factor = Log2eScaleFactor::LOG2E_SCALE_BUCKETS[scale] # scale_factor is used for mapping the index
@min_normal_lower_boundary_index = IEEE754::MIN_NORMAL_EXPONENT << @scale
@max_normal_lower_boundary_index = ((IEEE754::MAX_NORMAL_EXPONENT + 1) << @scale) - 1
end
Expand All @@ -33,12 +34,14 @@ def map_to_index(value)
def get_lower_boundary(inds)
if inds >= @max_normal_lower_boundary_index
return 2 * Math.exp((inds - (1 << @scale)) / @scale_factor) if inds == @max_normal_lower_boundary_index

raise StandardError, 'mapping overflow'
end

if inds <= @min_normal_lower_boundary_index
return IEEE754::MIN_NORMAL_VALUE if inds == @min_normal_lower_boundary_index
return Math.exp((inds + (1 << @scale)) / @scale_factor) / 2 if inds == @min_normal_lower_boundary_index - 1

raise StandardError, 'mapping underflow'
end

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,23 +8,22 @@ module OpenTelemetry
module SDK
module Metrics
module Aggregation
# TODO: Deal with this later
# rubocop:disable Lint/StructNewOverride

ExponentialHistogramDataPoint = Struct.new(:attributes, # optional Hash{String => String, Numeric, Boolean, Array<String, Numeric, Boolean>}
:start_time_unix_nano, # Integer nanoseconds since Epoch
:time_unix_nano, # Integer nanoseconds since Epoch
:count, # Integer count is the number of values in the population. Must be non-negative
:sum, # Integer sum of the values in the population. If count is zero then this field then this field must be zero
:scale, # Integer scale factor
:zero_count, # Integer special bucket that count of observations that fall into the zero bucket
:positive, # Buckets representing the positive range of the histogram.
:negative, # Buckets representing the negative range of the histogram.
:flags, # Integer flags associated with the data point.
:exemplars, # optional List of exemplars collected from measurements that were used to form the data point
:min, # optional Float min is the minimum value over (start_time, end_time].
:max, # optional Float max is the maximum value over (start_time, end_time].
:zero_threshold) # optional Float the threshold for the zero bucket
# ExponentialHistogramDataPoint models a single attribute-set's state for the
# exponential bucket histogram aggregation; fields mirror the OTLP
# ExponentialHistogramDataPoint message and are documented inline below.
ExponentialHistogramDataPoint = Struct.new(:attributes, # optional Hash{String => String, Numeric, Boolean, Array<String, Numeric, Boolean>}
:start_time_unix_nano, # Integer nanoseconds since Epoch
:time_unix_nano, # Integer nanoseconds since Epoch
:count, # Integer count is the number of values in the population. Must be non-negative
:sum, # Integer sum of the values in the population. If count is zero then this field must be zero
:scale, # Integer scale factor
:zero_count, # Integer count of observations that fall into the special zero bucket
:positive, # Buckets representing the positive range of the histogram.
:negative, # Buckets representing the negative range of the histogram.
:flags, # Integer flags associated with the data point.
:exemplars, # optional List of exemplars collected from measurements that were used to form the data point
:min, # optional Float min is the minimum value over (start_time, end_time].
:max, # optional Float max is the maximum value over (start_time, end_time].
:zero_threshold) # optional Float the threshold for the zero bucket
# rubocop:enable Lint/StructNewOverride
end
end
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@
expbh.update(1.23, {})
expbh.update(0, {})

expbh.update(1.45, {'foo' => 'bar'})
expbh.update(1.67, {'foo' => 'bar'})
expbh.update(1.45, { 'foo' => 'bar' })
expbh.update(1.67, { 'foo' => 'bar' })

exphdps = expbh.collect(start_time, end_time)

Expand All @@ -43,7 +43,7 @@
_(exphdps[0].max).must_equal(1.23)
_(exphdps[0].scale).must_equal(5)
_(exphdps[0].zero_count).must_equal(1)
_(exphdps[0].positive.counts).must_equal([0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0])
_(exphdps[0].positive.counts).must_equal([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0])
_(exphdps[0].negative.counts).must_equal([0])
_(exphdps[0].zero_threshold).must_equal(0)

Expand All @@ -52,9 +52,9 @@
_(exphdps[1].sum).must_equal(3.12)
_(exphdps[1].min).must_equal(1.45)
_(exphdps[1].max).must_equal(1.67)
_(exphdps[1].scale).must_equal(4)
_(exphdps[1].scale).must_equal(5)
_(exphdps[1].zero_count).must_equal(0)
_(exphdps[1].positive.counts).must_equal([0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
_(exphdps[1].positive.counts).must_equal([1, 0, 0, 0, 0, 0, 1, 0])
_(exphdps[1].negative.counts).must_equal([0])
_(exphdps[1].zero_threshold).must_equal(0)
end
Expand Down Expand Up @@ -133,6 +133,122 @@
_(exphdps[0].zero_threshold).must_equal(0)
end

it 'test_permutations' do
# Each test case is [values, expected-state]: feeding the three values in ANY
# insertion order into a max_size: 2 histogram must produce the same final
# scale, bucket offset, bucket count, and per-bucket tallies.
test_cases = [
[
[0.5, 1.0, 2.0],
{
scale: -1,
offset: -1,
len: 2,
at_zero: 2,
at_one: 1
}
],
[
[1.0, 2.0, 4.0],
{
scale: -1,
offset: -1,
len: 2,
at_zero: 1,
at_one: 2
}
],
[
[0.25, 0.5, 1.0],
{
scale: -1,
offset: -2,
len: 2,
at_zero: 1,
at_one: 2
}
]
]

test_cases.each do |test_values, expected|
# Exercise every insertion order of the three values.
test_values.permutation.each do |permutation|
expbh = OpenTelemetry::SDK::Metrics::Aggregation::ExponentialBucketHistogram.new(
aggregation_temporality: aggregation_temporality,
record_min_max: record_min_max,
max_size: 2,
max_scale: 20, # use default value of max scale; should downscale to 0
zero_threshold: 0
)

permutation.each do |value|
expbh.update(value, {})
end

exphdps = expbh.collect(start_time, end_time)

# Final state must be order-independent.
assert_equal expected[:scale], exphdps[0].scale
assert_equal expected[:offset], exphdps[0].positive.offset
assert_equal expected[:len], exphdps[0].positive.length
assert_equal expected[:at_zero], exphdps[0].positive.counts[0]
assert_equal expected[:at_one], exphdps[0].positive.counts[1]
end
end
end

it 'test_full_range' do
  # Exercise the histogram across the entire finite double range: the largest
  # finite value, 1, and the smallest subnormal (2**-1074). With max_size: 2
  # the aggregation must repeatedly downscale to fit everything in two buckets.
  histogram = OpenTelemetry::SDK::Metrics::Aggregation::ExponentialBucketHistogram.new(
    aggregation_temporality: aggregation_temporality,
    record_min_max: record_min_max,
    max_size: 2,
    max_scale: 20, # start from the default max scale; downscaling is expected
    zero_threshold: 0
  )

  [Float::MAX, 1, 2**-1074].each { |measurement| histogram.update(measurement, {}) }

  point = histogram.collect(start_time, end_time)[0]

  assert_equal Float::MAX, point.sum
  assert_equal 3, point.count
  assert_equal(-10, point.scale)

  # Two buckets starting at offset -1; counts are bounded by the recordings.
  assert_equal 2, point.positive.length
  assert_equal(-1, point.positive.offset)
  assert_operator point.positive.counts[0], :<=, 2
  assert_operator point.positive.counts[1], :<=, 1
end

it 'test_aggregator_min_max' do
  # With record_min_max enabled, min/max must reflect the extremes of the
  # recorded values — first for an all-positive stream, then all-negative.
  positives = OpenTelemetry::SDK::Metrics::Aggregation::ExponentialBucketHistogram.new(
    aggregation_temporality: aggregation_temporality,
    record_min_max: record_min_max,
    zero_threshold: 0
  )

  1.step(9, 2) { |value| positives.update(value, {}) }

  point = positives.collect(start_time, end_time)[0]
  assert_equal 1, point.min
  assert_equal 9, point.max

  negatives = OpenTelemetry::SDK::Metrics::Aggregation::ExponentialBucketHistogram.new(
    aggregation_temporality: aggregation_temporality,
    record_min_max: record_min_max,
    zero_threshold: 0
  )

  [1, 3, 5, 7, 9].each { |value| negatives.update(-value, {}) }

  point = negatives.collect(start_time, end_time)[0]
  assert_equal(-9, point.min)
  assert_equal(-1, point.max)
end

it 'test_merge' do
# TODO: implement once histogram merge is covered — should verify that merging
# two data points combines count/sum/min/max and rescales buckets consistently.
end
Expand Down
Loading

0 comments on commit 3eb733b

Please sign in to comment.