[why] Reduce the nesting of the results and output them as YAML instead of JSON
[example] es-jobs submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430
[output]
---
kvcount.suite=iperf: 2
kvcount.os=openeuler: 2
kvcount.arch=aarch64: 2
kvcount.job_state=finished: 2
kvcount.tbox_group=vm-2p16g: 2
kvcount.submit_id=a7c2f144-aa64-4a23-a390-cfe5bca3b430: 2
sum.stats.kmsg.timestamp:last: 2
sum.stats.dmesg.timestamp:last: 2
raw.stats.kmsg.timestamp:last: "[75.256233, 48.990905]"
raw.stats.iperf.tcp.receiver.bps: "[34017924155.510155, nil]"
raw.stats.dmesg.timestamp:last: "[75.256233, 48.990905]"
raw.stats.iperf.tcp.sender.bps: "[34073687935.01113, nil]"
raw.stats.iperf.udp.bps: "[nil, 1048573.3083402428]"
avg.stats.iperf.tcp.receiver.bps: 34017924155.510155
avg.stats.iperf.tcp.sender.bps: 34073687935.01113
avg.stats.iperf.udp.bps: 1048573.3083402428
max.stats.iperf.tcp.receiver.bps: 34017924155.510155
max.stats.iperf.tcp.sender.bps: 34073687935.01113
max.stats.iperf.udp.bps: 1048573.3083402428
min.stats.iperf.tcp.receiver.bps: 34017924155.510155
min.stats.iperf.tcp.sender.bps: 34073687935.01113
min.stats.iperf.udp.bps: 1048573.3083402428
Signed-off-by: Lu Kaiyi <2392863668@qq.com>
---
 lib/es_jobs.rb | 109 +++++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 100 insertions(+), 9 deletions(-)
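
For illustration only (not part of the patch): a minimal standalone sketch of the flattening idea, assuming a helper named flatten_keys that mirrors compact_hash below, applied to a made-up nested result:

    require 'yaml'

    # recursively join nested keys with '.' so the YAML output has no nesting
    def flatten_keys(hash, prefix = '', out = {})
      hash.each do |key, value|
        flat_key = prefix.empty? ? key.to_s : "#{prefix}.#{key}"
        if value.is_a?(Hash)
          flatten_keys(value, flat_key, out)
        else
          out[flat_key] = value
        end
      end
      out
    end

    nested = { 'kvcount' => { 'suite=iperf' => 2 }, 'avg.stats' => { 'iperf.udp.bps' => 1048573.3 } }
    puts flatten_keys(nested).to_yaml
    # ---
    # kvcount.suite=iperf: 2
    # avg.stats.iperf.udp.bps: 1048573.3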

diff --git a/lib/es_jobs.rb b/lib/es_jobs.rb
index a90b1ba..31a57a9 100644
--- a/lib/es_jobs.rb
+++ b/lib/es_jobs.rb
@@ -2,7 +2,9 @@
 # frozen_string_literal: true
 
 LKP_SRC = ENV['LKP_SRC'] || '/c/lkp-tests'
+KEYWORD = %w[suite os arch category job_state tbox_group upstream_repo submit_id group_id]
 
+require 'yaml'
 require "#{LKP_SRC}/lib/stats"
 require_relative './es_query'
 
@@ -123,17 +125,106 @@ class ESJobs
     return 0
   end
 
-  def output
-    result = {
-      'stats.count' => @stats['stats.count']
-    }
+  def query_jobs_from_es(items)
+    es = ESQuery.new(ES_HOST, ES_PORT)
+    result = es.multi_field_query items
+    jobs = result['hits']['hits']
+    jobs.map! { |job| job['_source'] }
+    return jobs
+  end
+
+  def get_all_metrics(jobs)
+    metrics = []
+    jobs.each do |job|
+      stats = job['stats']
+      next unless stats
+
+      metrics.concat(stats.keys)
+    end
+    metrics.uniq!
+  end
+
+  def initialize_result_hash(jobs, metrics)
+    result = {}
+    result['kvcount'] = {}
+    result['sum.stats'] = {}
+    result['raw.stats'] = {}
+    result['avg.stats'] = {}
+    result['max.stats'] = {}
+    result['min.stats'] = {}
+    metrics.each { |metric| result['raw.stats'][metric] = [] }
+    result
+  end
+
+  def set_default_value(result, stats, metrics)
+    job_metrics = stats.keys
+    left_metrics = metrics - job_metrics
+    left_metrics.each { |metric| result['raw.stats'][metric] << nil }
+
+    stats.each do |key, value|
+      result['raw.stats'][key] << value
+    end
+  end
+
+  def kvcount(result, job)
+    KEYWORD.each do |keyword|
+      next unless job[keyword]
 
-    @stats.each do |key, value|
-      result[key] = value if @fields.include?(key)
+      result['kvcount']["#{keyword}=#{job[keyword]}"] ||= 0
+      result['kvcount']["#{keyword}=#{job[keyword]}"] += 1
     end
+  end
 
-    @result['stats_filter_result'] = @stats_filter_result unless @stats_filter.empty?
-    @result.merge!(result)
-    puts JSON.pretty_generate(@result)
+  def stats_count(result)
+    result['raw.stats'].each do |key, value|
+      if function_stat?(key)
+        result['sum.stats'][key] = value.compact.size
+      else
+        result['avg.stats'][key] = value.compact.sum / value.compact.size.to_f
+        result['max.stats'][key] = value.compact.max
+        result['min.stats'][key] = value.compact.min
+      end
+      result['raw.stats'][key] = value.to_s
+    end
+  end
+
+  def query_jobs_state(jobs)
+    metrics = get_all_metrics(jobs)
+    result = initialize_result_hash(jobs, metrics)
+    jobs.each do |job|
+      stats = job['stats']
+      next unless stats
+
+      set_default_value(result, stats, metrics)
+      kvcount(result, job)
+    end
+
+    stats_count(result)
+    result
+  end
+
+  def compact_hash(prefix, result)
+    result.each do |key, value|
+      if prefix.empty?
+        prefix_key = "#{key}"
+      else
+        prefix_key = "#{prefix}.#{key}"
+      end
+
+      if value.is_a? Hash
+        compact_hash(prefix_key, value)
+      else
+        @results[prefix_key] = value
+      end
+    end
+  end
+
+  def output
+    jobs = query_jobs_from_es(@es_query)
+    @result = query_jobs_state(jobs)
+    @results = {}
+    compact_hash('', @result)
+    puts @results.to_yaml
+    #puts JSON.pretty_generate(@results)
   end
 end
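
A note on the aggregation rule above (illustrative sketch, not part of the patch): set_default_value pushes nil for every job that lacks a metric, so each raw.stats array stays aligned one-entry-per-job, and stats_count drops those nils with compact before aggregating. With made-up values for one metric across two jobs:

    raw = [34017924155.5, nil]          # nil: the second job did not report this metric
    values = raw.compact                # => [34017924155.5]
    avg = values.sum / values.size.to_f # => 34017924155.5
    max = values.max                    # => 34017924155.5
    min = values.min                    # => 34017924155.5
    # For function stats (e.g. dmesg.timestamp:last) only the number of jobs
    # that reported the metric is kept: raw.compact.size  # => 1
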
On Fri, Jan 15, 2021 at 11:47:07AM +0800, Lu Kaiyi wrote:
> +  def initialize_result_hash(jobs, metrics)
> +    result = {}
> +    result['kvcount'] = {}
> +    result['sum.stats'] = {}
> +    result['raw.stats'] = {}
> +    result['avg.stats'] = {}
> +    result['max.stats'] = {}
> +    result['min.stats'] = {}

The above lines can be collapsed into a single hash literal:

    result = { 'kvcount' => {}, 'sum.stats' => {}, 'raw.stats' => {}, ... }

> +  def set_default_value(result, stats, metrics)
> +    job_metrics = stats.keys
> +    left_metrics = metrics - job_metrics

The intermediate variable is redundant; this can simply be:

    left_metrics = metrics - stats.keys
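
For reference, the method with that change applied would read (a sketch only, behaviour unchanged):

    def set_default_value(result, stats, metrics)
      left_metrics = metrics - stats.keys
      left_metrics.each { |metric| result['raw.stats'][metric] << nil }

      stats.each do |key, value|
        result['raw.stats'][key] << value
      end
    end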

> +    puts @results.to_yaml
> +    #puts JSON.pretty_generate(@results)

Please delete this useless commented-out line.