[why] Reduce the nesting of the result hash and output it as flattened YAML-style key/value lines
[example] es-jobs os=archlinux
[output]
kvcount.arch=aarch64: 43
kvcount.arch=x86_64: 2
kvcount.category=benchmark: 12
kvcount.category=functional: 33
kvcount.job_state=failed: 20
kvcount.job_state=finished: 25
kvcount.os=archlinux: 45
kvcount.suite=cci-depends: 28
kvcount.suite=cci-makepkg: 5
kvcount.suite=iperf: 3
kvcount.suite=iperf-walk-os-test: 9
kvcount.summary.any_error=1: 24
kvcount.summary.any_fail=1: 25
kvcount.summary.any_stderr=1: 44
kvcount.summary.any_warning=1: 23
kvcount.summary.success=1: 1
kvcount.tbox_group=vm-2p16g--wangyong: 1
kvcount.tbox_group=vm-2p16g--wcl: 1
kvcount.tbox_group=vm-2p16g.wangyong: 34
kvcount.tbox_group=vm-2p8g: 9
...
raw.id.[job_state=failed]: ["crystal.122405","crystal.122406","crystal.122331","crystal.122365","crystal.122404","crystal.122438","crystal.122607","crystal.122370","crystal.122375","crystal.122410","crystal.122900","crystal.122906","crystal.122271","crystal.122451","crystal.122835","crystal.122834","crystal.122232","crystal.122403","crystal.122453","crystal.122886"]
...
raw.id.[summary.any_error=1]: ["crystal.122436","crystal.122476","crystal.122803","crystal.122883","crystal.122910","crystal.122438","crystal.122452","crystal.122895","crystal.122446","crystal.122685","crystal.122687","crystal.122833","crystal.122451","crystal.122690","crystal.122719","crystal.122882","crystal.122890","crystal.122453","crystal.122443","crystal.122435","crystal.122432","crystal.122688","crystal.122689","crystal.122886"]
...
sum.stats.stderr./lkp/lkp/src/monitors/perf-stat:#:main: 12
sum.stats.stderr.Can_not_find_perf_command: 12
sum.stats.stderr./lkp/lkp/src/tests/wrapper:line#:which:command_not_found: 10
...
raw.stats.sched_debug.cfs_rq:/.load.stddev: [524288.0,null,null,null,null,null,null,null,null,524288.0,524288.0,null,null,null,null,null,516608.0,null,1048576.0,null,null,null,null,null,null,null,524288.0,null,null,null,null,2104832.0,1572864.0,null,null,2097152.0,null,null,null,null,null,null,null,null,null]
...
avg.stats.sched_debug.cfs_rq:/.load.stddev: 1048576.0
avg.stats.softirqs.CPU1.NET_RX: 2.5833333333333335
avg.stats.slabinfo.kmalloc-512.active_objs: 1372.75
...
max.stats.sched_debug.cfs_rq:/.load.stddev: 2104832.0
max.stats.softirqs.CPU1.NET_RX: 6
max.stats.slabinfo.kmalloc-512.active_objs: 1616
...
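Each output line above is a flattened key/value pair: nested hash keys are joined with '.' and the leaf value is printed as JSON. A minimal standalone sketch of that flattening idea (hypothetical helper name and made-up numbers, not the patch code itself):

  require 'json'

  # Join nested hash keys with '.' and print each leaf as "key: value".
  def flatten_to_yaml_lines(hash, prefix = '')
    hash.each do |key, value|
      full_key = prefix.empty? ? key.to_s : "#{prefix}.#{key}"
      if value.is_a?(Hash)
        flatten_to_yaml_lines(value, full_key)
      else
        puts "#{full_key}: #{value.to_json}"
      end
    end
  end

  flatten_to_yaml_lines(
    'kvcount' => { 'os=archlinux' => 45, 'arch=aarch64' => 43 },
    'avg.stats' => { 'softirqs.CPU1.NET_RX' => 2.58 }
  )
  # kvcount.os=archlinux: 45
  # kvcount.arch=aarch64: 43
  # avg.stats.softirqs.CPU1.NET_RX: 2.58

One dotted key per line keeps the output flat and easy to grep, instead of a deeply nested JSON document.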
Signed-off-by: Lu Kaiyi <2392863668@qq.com>
---
 lib/es_jobs.rb | 201 +++++++++++++++++++++++++++----------------------
 1 file changed, 111 insertions(+), 90 deletions(-)
diff --git a/lib/es_jobs.rb b/lib/es_jobs.rb
index a90b1ba..af0a1a9 100644
--- a/lib/es_jobs.rb
+++ b/lib/es_jobs.rb
@@ -3,137 +3,158 @@
 LKP_SRC = ENV['LKP_SRC'] || '/c/lkp-tests'

+KEYWORD = %w[suite os arch category job_state tbox_group upstream_repo summary.success
+             summary.any_fail summary.any_error summary.any_stderr summary.any_warning].freeze
+
+require 'json'
 require "#{LKP_SRC}/lib/stats"
 require_relative './es_query'

 # deal jobs search from es
 class ESJobs
   def initialize(es_query, my_refine = [], fields = [], stats_filter = [])
-    @es_query = es_query
-    @es = ESQuery.new(ES_HOST, ES_PORT)
+    @jobs = query_jobs_from_es(es_query)
     @refine = my_refine
     @fields = fields
     @stats_filter = stats_filter
-    @stats_filter_result = {}
     @refine_jobs = []
-    @jobs = {}
-    @stats_level = {
-      0 => 'stats.success',
-      1 => 'stats.unknown',
-      2 => 'stats.warning',
-      3 => 'stats.has_error'
-    }
-    set_defaults
-    deal_jobs
+    set_jobs_summary
   end

-  def set_defaults
-    query_result = @es.multi_field_query(@es_query)
-    query_result['hits']['hits'].each do |job|
-      @jobs[job['_id']] = job['_source']
-    end
-
-    @stats = {
-      'stats.count' => Hash.new(0),
-      'stats.sum' => Hash.new(0),
-      'stats.avg' => Hash.new(0)
-    }
-    @result = {}
-    @fields.each do |field|
-      @result[field] = []
-    end
+  def query_jobs_from_es(items)
+    es = ESQuery.new(ES_HOST, ES_PORT)
+    result = es.multi_field_query items
+    jobs = result['hits']['hits']
+    jobs.map! { |job| job['_source'] }
+    return jobs
   end

-  def add_result_fields(job, level)
-    return unless @refine.include?(level) || @refine.include?(-1)
+  def set_job_summary(stats, job)
+    summary_result = ''
+    if stats.keys.any? { |stat| stat.match(/warn/i) }
+      job['summary.any_warning'] = 1
+      summary_result = 'warning'
+    end
+    if stats.keys.any? { |stat| stat.match(/stderr/i) }
+      job['summary.any_stderr'] = 1
+      summary_result = 'stderr'
+    end
+    if stats.keys.any? { |stat| stat.match(/error|nr_fail=0/i) }
+      job['summary.any_error'] = 1
+      summary_result = 'error'
+    end
+    if stats.keys.any? { |stat| stat.match(/fail/i) } &&
+       stats.keys.all? { |stat| !stat.match(/nr_fail=0/i) }
+      job['summary.any_fail'] = 1
+      summary_result = 'fail'
+    end
+    return unless summary_result.empty?

-    @refine_jobs << job['id']
-    @fields.each do |field|
-      value = job[field]
-      if value
-        value = job['id'] + '.' + value if field == 'job_state'
-        @result[field] << value
-      end
+    job['summary.success'] = 1
+  end

-      next unless job['stats']
+  # set jobs summary fields information in place
+  def set_jobs_summary
+    @jobs.each do |job|
+      stats = job['stats']
+      next unless stats

-      @result[field] << job['stats'][field] if job['stats'][field]
+      set_job_summary(stats, job)
     end
   end

-  def deal_jobs
-    stats_count = Hash.new(0)
-    stats_jobs = {}
+  def get_all_metrics(jobs)
+    metrics = []
+    jobs.each do |job|
+      stats = job['stats']
+      next unless stats

-    @jobs.each do |job_id, job|
-      level = deal_stats(job)
-      add_result_fields(job, level)
-
-      stat_key = @stats_level[level]
-      stat_jobs_key = stat_key + '_jobs'
-
-      stats_count[stat_key] += 1
-      stats_jobs[stat_jobs_key] ||= []
-      stats_jobs[stat_jobs_key] << job_id
+      metrics.concat(stats.keys)
     end
+    metrics.uniq
+  end

-    @stats['stats.count'].merge!(stats_count)
-    @stats['stats.count'].merge!(stats_jobs)
+  def initialize_result_hash(metrics)
+    result = {
+      'kvcount' => {},
+      'raw.id' => {},
+      'sum.stats' => {},
+      'raw.stats' => {},
+      'avg.stats' => {},
+      'max.stats' => {},
+      'min.stats' => {}
+    }
+    metrics.each { |metric| result['raw.stats'][metric] = [] }
+    result
   end

-  def deal_stats(job, level = 0)
-    return 1 unless job['stats']
+  def set_default_value(result, stats, metrics)
+    left_metrics = metrics - stats.keys
+    left_metrics.each { |metric| result['raw.stats'][metric] << nil }

-    job['stats'].each do |key, value|
-      match_stats_filter(key, value, job['id'])
-      calculate_stat(key, value)
-      level = get_stat_level(key, level)
+    stats.each do |key, value|
+      result['raw.stats'][key] << value
     end
-    return level
   end

-  def match_stats_filter(key, value, job_id)
-    @stats_filter.each do |filter|
-      next unless key.include?(filter)
+  def kvcount(result, job)
+    KEYWORD.each do |keyword|
+      next unless job[keyword]

-      key = job_id + '.' + key
-      @stats_filter_result[key] = value
-
-      break
+      result['kvcount']["#{keyword}=#{job[keyword]}"] ||= 0
+      result['kvcount']["#{keyword}=#{job[keyword]}"] += 1
+      result['raw.id']["[#{keyword}=#{job[keyword]}]"] ||= []
+      result['raw.id']["[#{keyword}=#{job[keyword]}]"] << job['id']
     end
   end

-  def calculate_stat(key, value)
-    if function_stat?(key)
-      return unless @fields.include?('stats.sum')
-
-      @stats['stats.sum'][key] += value
-    else
-      return unless @fields.include?('stats.avg')
-
-      @stats['stats.avg'][key] = (@stats['stats.avg'][key] + value) / 2
+  def stats_count(result)
+    result['raw.stats'].each do |key, value|
+      if function_stat?(key)
+        result['sum.stats'][key] = value.compact.size
+      else
+        result['avg.stats'][key] = value.compact.sum / value.compact.size.to_f
+        result['max.stats'][key] = value.compact.max
+        result['min.stats'][key] = value.compact.min
+      end
     end
   end

-  def get_stat_level(stat, level)
-    return level if level >= 3
-    return 3 if stat.match(/error|fail/i)
-    return 2 if stat.match(/warn/i)
+  def query_jobs_state(jobs)
+    metrics = get_all_metrics(jobs)
+    result = initialize_result_hash(metrics)
+    jobs.each do |job|
+      stats = job['stats']
+      next unless stats
+
+      set_default_value(result, stats, metrics)
+      kvcount(result, job)
+    end

-    return 0
+    stats_count(result)
+    result
   end

-  def output
-    result = {
-      'stats.count' => @stats['stats.count']
-    }
+  def output_yaml(prefix, result)
+    result.each do |key, value|
+      if prefix.empty?
+        prefix_key = "#{key}"
+      else
+        prefix_key = "#{prefix}.#{key}"
+      end

-    @stats.each do |key, value|
-      result[key] = value if @fields.include?(key)
+      if value.is_a? Hash
+        output_yaml(prefix_key, value)
+      else
+        puts "#{prefix_key}: #{value.to_json}"
+      end
     end
+  end

-    @result['stats_filter_result'] = @stats_filter_result unless @stats_filter.empty?
-    @result.merge!(result)
-    puts JSON.pretty_generate(@result)
+  def output
+    @result = query_jobs_state(@jobs)
+    @result['kvcount'] = @result['kvcount'].sort.to_h
+    @result['raw.id'] = @result['raw.id'].sort.to_h
+    output_yaml('', @result)
   end
 end
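
For reference, a usage sketch of the reworked class. This is an assumption-laden example: it presumes the compass-ci environment where ES_HOST/ES_PORT are already configured and that ESQuery#multi_field_query accepts a field=>value hash matching the es-jobs invocation in [example] above.

  require_relative 'lib/es_jobs'

  # Query archlinux jobs and print the flattened "key: value" summary lines.
  es_jobs = ESJobs.new({ 'os' => 'archlinux' })
  es_jobs.output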