[why] Reduce the nesting of output results
[example] es-jobs submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430
[output]
{"kvcount.suite=iperf"=>2, "kvcount.os=openeuler"=>2, "kvcount.arch=aarch64"=>2, "kvcount.job_state=finished"=>2, "kvcount.tbox_group=vm-2p16g"=>2, "kvcount.submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430"=>2, "sum.stats.kmsg.timestamp:last"=>2, "sum.stats.dmesg.timestamp:last"=>2, "raw.stats.kmsg.timestamp:last"=>[75.256233, 48.990905], "raw.stats.iperf.tcp.receiver.bps"=>[34017924155.510155, nil], "raw.stats.dmesg.timestamp:last"=>[75.256233, 48.990905], "raw.stats.iperf.tcp.sender.bps"=>[34073687935.01113, nil], "raw.stats.iperf.udp.bps"=>[nil, 1048573.3083402428], "avg.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "avg.stats.iperf.tcp.sender.bps"=>34073687935.01113, "avg.stats.iperf.udp.bps"=>1048573.3083402428, "max.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "max.stats.iperf.tcp.sender.bps"=>34073687935.01113, "max.stats.iperf.udp.bps"=>1048573.3083402428, "min.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "min.stats.iperf.tcp.sender.bps"=>34073687935.01113, "min.stats.iperf.udp.bps"=>1048573.3083402428}
Signed-off-by: Lu Kaiyi 2392863668@qq.com --- lib/es_jobs.rb | 115 +++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 102 insertions(+), 13 deletions(-)
diff --git a/lib/es_jobs.rb b/lib/es_jobs.rb index a90b1ba..754578d 100644 --- a/lib/es_jobs.rb +++ b/lib/es_jobs.rb @@ -2,6 +2,7 @@ # frozen_string_literal: true
LKP_SRC = ENV['LKP_SRC'] || '/c/lkp-tests'
# Job fields whose value occurrences are tallied into the "kvcount" section
# of the output (e.g. "kvcount.suite=iperf" => 2).
# Frozen: `# frozen_string_literal: true` only freezes strings, not arrays.
KEYWORD = %w[suite os arch category job_state tbox_group upstream_repo submit_id group_id].freeze
require "#{LKP_SRC}/lib/stats" require_relative './es_query' @@ -18,10 +19,10 @@ class ESJobs @refine_jobs = [] @jobs = {} @stats_level = { - 0 => 'stats.success', - 1 => 'stats.unknown', - 2 => 'stats.warning', - 3 => 'stats.has_error' + 0 => 'success', + 1 => 'unknown', + 2 => 'warning', + 3 => 'has_error' } set_defaults deal_jobs @@ -123,17 +124,105 @@ class ESJobs return 0 end
- def output - result = { - 'stats.count' => @stats['stats.count'] - } + def query_jobs_from_es(items) + es = ESQuery.new(ES_HOST, ES_PORT) + result = es.multi_field_query items + jobs = result['hits']['hits'] + jobs.map! { |job| job['_source'] } + return jobs + end + + def get_all_metrics(jobs) + metrics = [] + jobs.each do |job| + stats = job['stats'] + next unless stats + + metrics.concat(stats.keys) + end + metrics.uniq! + end + + def initialize_result_hash(jobs, metrics) + result = {} + result['kvcount'] = {} + result['sum.stats'] = {} + result['raw.stats'] = {} + result['avg.stats'] = {} + result['max.stats'] = {} + result['min.stats'] = {} + metrics.each { |metric| result['raw.stats'][metric] = [] } + result + end + + def set_default_value(result, stats, metrics) + job_metrics = stats.keys + left_metrics = metrics - job_metrics + left_metrics.each { |metric| result['raw.stats'][metric] << nil } + + stats.each do |key, value| + result['raw.stats'][key] << value + end + end + + def kvcount(result, job) + KEYWORD.each do |keyword| + next unless job[keyword]
- @stats.each do |key, value| - result[key] = value if @fields.include?(key) + result['kvcount']["#{keyword}=#{job[keyword]}"] ||= 0 + result['kvcount']["#{keyword}=#{job[keyword]}"] += 1 end + end
- @result['stats_filter_result'] = @stats_filter_result unless @stats_filter.empty? - @result.merge!(result) - puts JSON.pretty_generate(@result) + def stats_count(result) + result['raw.stats'].each do |key, value| + if function_stat?(key) + result['sum.stats'][key] = value.compact.size + else + result['avg.stats'][key] = value.compact.sum / value.compact.size.to_f + result['max.stats'][key] = value.compact.max + result['min.stats'][key] = value.compact.min + end + end + end + + def query_jobs_state(jobs) + metrics = get_all_metrics(jobs) + result = initialize_result_hash(jobs, metrics) + jobs.each do |job| + stats = job['stats'] + next unless stats + + set_default_value(result, stats, metrics) + kvcount(result, job) + end + + stats_count(result) + result + end + + def compact_hash(prefix, result) + result.each do |key, value| + if prefix.empty? + prefix_key = "#{key}" + else + prefix_key = "#{prefix}.#{key}" + end + + if value.is_a? Hash + compact_hash(prefix_key, value) + else + @results[prefix_key] = value + end + end + end + + def output + jobs = query_jobs_from_es(@es_query) + @result = query_jobs_state(jobs) + @results = {} + compact_hash('', @result) + pp @results + #puts JSON.pretty_generate(@results) end end
On Fri, Jan 15, 2021 at 11:16:25AM +0800, Lu Kaiyi wrote:
[why] Reduce the nesting of output results
[example] es-jobs submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430
[output]
{"kvcount.suite=iperf"=>2, "kvcount.os=openeuler"=>2,
输出yaml吧。(Please output YAML instead.)
"kvcount.arch=aarch64"=>2, "kvcount.job_state=finished"=>2, "kvcount.tbox_group=vm-2p16g"=>2, "kvcount.submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430"=>2, "sum.stats.kmsg.timestamp:last"=>2, "sum.stats.dmesg.timestamp:last"=>2,
"raw.stats.kmsg.timestamp:last"=>[75.256233, 48.990905], "raw.stats.iperf.tcp.receiver.bps"=>[34017924155.510155, nil], "raw.stats.dmesg.timestamp:last"=>[75.256233, 48.990905], "raw.stats.iperf.tcp.sender.bps"=>[34073687935.01113, nil], "raw.stats.iperf.udp.bps"=>[nil, 1048573.3083402428],
这些raw.*字段 仍然保持一行一个 不要变成 - value1 - value2 - value3 的形式 (Keep these raw.* fields one per line; do not turn them into the "- value1 - value2 - value3" list form.)
Thanks, Fengguang
"avg.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "avg.stats.iperf.tcp.sender.bps"=>34073687935.01113, "avg.stats.iperf.udp.bps"=>1048573.3083402428, "max.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "max.stats.iperf.tcp.sender.bps"=>34073687935.01113, "max.stats.iperf.udp.bps"=>1048573.3083402428, "min.stats.iperf.tcp.receiver.bps"=>34017924155.510155, "min.stats.iperf.tcp.sender.bps"=>34073687935.01113, "min.stats.iperf.udp.bps"=>1048573.3083402428}
Signed-off-by: Lu Kaiyi 2392863668@qq.com
lib/es_jobs.rb | 115 +++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 102 insertions(+), 13 deletions(-)
diff --git a/lib/es_jobs.rb b/lib/es_jobs.rb index a90b1ba..754578d 100644 --- a/lib/es_jobs.rb +++ b/lib/es_jobs.rb @@ -2,6 +2,7 @@ # frozen_string_literal: true
LKP_SRC = ENV['LKP_SRC'] || '/c/lkp-tests' +KEYWORD = %w[suite os arch category job_state tbox_group upstream_repo submit_id group_id]
require "#{LKP_SRC}/lib/stats" require_relative './es_query' @@ -18,10 +19,10 @@ class ESJobs @refine_jobs = [] @jobs = {} @stats_level = {
0 => 'stats.success',
1 => 'stats.unknown',
2 => 'stats.warning',
3 => 'stats.has_error'
0 => 'success',
1 => 'unknown',
2 => 'warning',
3 => 'has_error'
} set_defaults deal_jobs
@@ -123,17 +124,105 @@ class ESJobs return 0 end
- def output
- result = {
'stats.count' => @stats['stats.count']
- }
- def query_jobs_from_es(items)
- es = ESQuery.new(ES_HOST, ES_PORT)
- result = es.multi_field_query items
- jobs = result['hits']['hits']
- jobs.map! { |job| job['_source'] }
- return jobs
- end
- def get_all_metrics(jobs)
- metrics = []
- jobs.each do |job|
stats = job['stats']
next unless stats
metrics.concat(stats.keys)
- end
- metrics.uniq!
- end
- def initialize_result_hash(jobs, metrics)
- result = {}
- result['kvcount'] = {}
- result['sum.stats'] = {}
- result['raw.stats'] = {}
- result['avg.stats'] = {}
- result['max.stats'] = {}
- result['min.stats'] = {}
- metrics.each { |metric| result['raw.stats'][metric] = [] }
- result
- end
- def set_default_value(result, stats, metrics)
- job_metrics = stats.keys
- left_metrics = metrics - job_metrics
- left_metrics.each { |metric| result['raw.stats'][metric] << nil }
- stats.each do |key, value|
result['raw.stats'][key] << value
- end
- end
- def kvcount(result, job)
- KEYWORD.each do |keyword|
next unless job[keyword]
- @stats.each do |key, value|
result[key] = value if @fields.include?(key)
result['kvcount']["#{keyword}=#{job[keyword]}"] ||= 0
end
result['kvcount']["#{keyword}=#{job[keyword]}"] += 1
- end
- @result['stats_filter_result'] = @stats_filter_result unless @stats_filter.empty?
- @result.merge!(result)
- puts JSON.pretty_generate(@result)
- def stats_count(result)
- result['raw.stats'].each do |key, value|
if function_stat?(key)
result['sum.stats'][key] = value.compact.size
else
result['avg.stats'][key] = value.compact.sum / value.compact.size.to_f
result['max.stats'][key] = value.compact.max
result['min.stats'][key] = value.compact.min
end
- end
- end
- def query_jobs_state(jobs)
- metrics = get_all_metrics(jobs)
- result = initialize_result_hash(jobs, metrics)
- jobs.each do |job|
stats = job['stats']
next unless stats
set_default_value(result, stats, metrics)
kvcount(result, job)
- end
- stats_count(result)
- result
- end
- def compact_hash(prefix, result)
- result.each do |key, value|
if prefix.empty?
prefix_key = "#{key}"
else
prefix_key = "#{prefix}.#{key}"
end
if value.is_a? Hash
compact_hash(prefix_key, value)
else
@results[prefix_key] = value
end
- end
- end
- def output
- jobs = query_jobs_from_es(@es_query)
- @result = query_jobs_state(jobs)
- @results = {}
- compact_hash('', @result)
- pp @results
- #puts JSON.pretty_generate(@results) end
end
2.23.0