mailweb.openeuler.org
Manage this list

Keyboard Shortcuts

Thread View

  • j: Next unread message
  • k: Previous unread message
  • j a: Jump to all threads
  • j l: Jump to mailing list overview

Compass-ci

Threads by month
  • ----- 2025 -----
  • August
  • July
  • June
  • May
  • April
  • March
  • February
  • January
  • ----- 2024 -----
  • December
  • November
  • October
  • September
  • August
  • July
  • June
  • May
  • April
  • March
  • February
  • January
  • ----- 2023 -----
  • December
  • November
  • October
  • September
  • August
  • July
  • June
  • May
  • April
  • March
  • February
  • January
  • ----- 2022 -----
  • December
  • November
  • October
  • September
  • August
  • July
  • June
  • May
  • April
  • March
  • February
  • January
  • ----- 2021 -----
  • December
  • November
  • October
  • September
  • August
  • July
  • June
  • May
  • April
  • March
  • February
  • January
  • ----- 2020 -----
  • December
  • November
  • October
  • September
compass-ci@openeuler.org

  • 5233 discussions
[PATCH compass-ci] delimiter/utils: add query job stats multiple times
by Cao Xueliang 29 Dec '20

29 Dec '20
Sometimes, the job state is already extract_finished, but we can't query the job stats, so add a temporary solutions, query the job stats multiple times. The extract container will be improved in future. Signed-off-by: Cao Xueliang <caoxl78320(a)163.com> --- src/delimiter/utils.rb | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/src/delimiter/utils.rb b/src/delimiter/utils.rb index 372e821..17128ab 100644 --- a/src/delimiter/utils.rb +++ b/src/delimiter/utils.rb @@ -83,10 +83,10 @@ module Utils extract_finished = monitor_run_stop(query) return nil unless extract_finished.zero? - es = ESQuery.new - new_job = es.query_by_id(new_job_id) + stats = query_stats(new_job_id, 10) + raise "es cant query #{new_job_id} stats field!" unless stats - status = new_job['stats'].key?(error_id) ? 'bad' : 'good' + status = stats.key?(error_id) ? 'bad' : 'good' puts "new_job_id: #{new_job_id}" puts "upstream_commit: #{job['upstream_commit']}" record_jobs(new_job_id, job['upstream_commit']) @@ -94,6 +94,21 @@ module Utils return status end + # sometimes the job_state is extract_finished + # but we cant query the job stats field in es, so, add many times query + # this is a temporary solution, the extract container will be improved in future. + def query_stats(job_id, times) + (1..times).each do |i| + new_job = ESQuery.new.query_by_id(job_id) + puts "query stats times: #{i}" + return new_job['stats'] if new_job['stats'] + + sleep 60 + end + + return nil + end + def record_jobs(job_id, job_commit) FileUtils.mkdir_p TMP_RESULT_ROOT unless File.exist? TMP_RESULT_ROOT commit_jobs = File.join(TMP_RESULT_ROOT, 'commit_jobs') -- 2.23.0
1 0
0 0
[PATCH lkp-tests 2/2] my_curl: change the curl request so that the cache can be used.
by Cui Yili 29 Dec '20

29 Dec '20
change curl reuqest, in order to use local cache. E..g https://download.redis.io/releases/redis-4.0.2.tar.gz ==> https://ip:port/download.redis.io/releases/redis-4.0.2.tar.gz -k http://download.redis.io/releases/redis-4.0.2.tar.gz ==> http://ip:port/download.redis.io/releases/redis-4.0.2.tar.gz -k Signed-off-by: cuiyili <2268260388(a)qq.com> --- rootfs/addon/usr/bin/my_curl | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100755 rootfs/addon/usr/bin/my_curl diff --git a/rootfs/addon/usr/bin/my_curl b/rootfs/addon/usr/bin/my_curl new file mode 100755 index 000000000..697f53d65 --- /dev/null +++ b/rootfs/addon/usr/bin/my_curl @@ -0,0 +1,22 @@ +#!/bin/bash + +declare -a array +j=0 + +if [ -n "$HTTP_PROXY_HOST" ]; then + for param in "$@"; + do + if [[ "$param" =~ http://(.*) ]] && [ -n "$HTTP_PROXY_PORT" ]; then + array[$j]="http://$HTTP_PROXY_HOST:$HTTP_PROXY_PORT/${BASH_REMATCH[1]}" + elif [[ "$param" =~ https://(.*) ]] && [ -n "$HTTPS_PROXY_PORT" ]; then + array[$j]="https://$HTTP_PROXY_HOST:$HTTPS_PROXY_PORT/${BASH_REMATCH[1]}" + else + array[$j]="$param" + fi + ((j++)) + done + + command curl -k ${array[*]} +else + command curl "$@" +fi -- 2.23.0
2 1
0 0
[PATCH lkp-tests 1/2] lkp-setup-rootfs: all use of env http_proxy and delete specific proxy settings
by Cui Yili 29 Dec '20

29 Dec '20
Signed-off-by: cuiyili <2268260388(a)qq.com> --- bin/lkp-setup-rootfs | 9 --------- 1 file changed, 9 deletions(-) diff --git a/bin/lkp-setup-rootfs b/bin/lkp-setup-rootfs index cca1b53be..a7f1a7ffb 100755 --- a/bin/lkp-setup-rootfs +++ b/bin/lkp-setup-rootfs @@ -105,7 +105,6 @@ install_proxy_yum() { [ -d "/etc/yum/" ] || return - echo "proxy=http://$SQUID_HOST:$SQUID_PORT" >> /etc/yum.conf if ls /etc/yum.repos.d/*.repo >/dev/null 2>&1; then sed -i 's/https:/http:/g' /etc/yum.repos.d/*.repo fi @@ -117,11 +116,6 @@ install_proxy_apt() { [ -d "/etc/apt/" ] || return - cat << EOF > /etc/apt/apt.conf.d/proxy -Acquire::http::proxy "http://$SQUID_HOST:$SQUID_PORT/"; -Acquire::ftp::proxy "ftp://$SQUID_HOST:$SQUID_PORT/"; -EOF - for cfg in /etc/apt/sources.list /etc/apt/sources.list.d/* do [ -f "$cfg" ] || continue @@ -176,9 +170,6 @@ install_proxy() install_goproxy - [ -z "$SQUID_HOST" ] && return - [ -z "$SQUID_PORT" ] && return - install_proxy_apt || install_proxy_yum } -- 2.23.0
1 0
0 0
[PATCH compass-ci] scheduler: fix: don't return queues individually
by Ren Wen 29 Dec '20

29 Dec '20
- avoid to auto submit idle job if finding no job from sub-queues, because idle-queues are not found yet. - sub-queues should be shuffled. Signed-off-by: Ren Wen <15991987063(a)163.com> --- src/scheduler/find_job_boot.cr | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/scheduler/find_job_boot.cr b/src/scheduler/find_job_boot.cr index 21396c4..a1e3497 100644 --- a/src/scheduler/find_job_boot.cr +++ b/src/scheduler/find_job_boot.cr @@ -72,14 +72,14 @@ class Sched end end - return sub_queues unless sub_queues.empty? - idle_queues = [] of String default_queues.each do |queue| idle_queues << "#{queue}/idle" end - return idle_queues + all_queues = rand_queues(sub_queues) + idle_queues + + return all_queues end def get_job_from_queues(queues, testbox) -- 2.23.0
1 0
0 0
[PATCH compass-ci 2/2] scheduler: fix confused comments and add comments
by Ren Wen 29 Dec '20

29 Dec '20
fix: - use 'is joined with' instead of 'is link with'. - fix format. add: - add a TODO comment. Signed-off-by: Ren Wen <15991987063(a)163.com> --- src/scheduler/find_job_boot.cr | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/scheduler/find_job_boot.cr b/src/scheduler/find_job_boot.cr index f60c0e5..a7b4bd8 100644 --- a/src/scheduler/find_job_boot.cr +++ b/src/scheduler/find_job_boot.cr @@ -25,10 +25,11 @@ class Sched @log.warn(e) end - # auto submit a job to collect the host information - # grub hostname is link with ":", like "00:01:02:03:04:05" - # remind: if like with "-", last "-05" is treated as host number - # then hostname will be "sut-00-01-02-03-04" !!! + # auto submit a job to collect the host information. + # + # grub hostname is joined with ":", like "00:01:02:03:04:05". + # remind: if joined with "-", last "-05" is treated as host number + # then hostname will be "sut-00-01-02-03-04" !!! def submit_host_info_job(mac) host = "sut-#{mac}" @redis.hash_set("sched/mac2host", normalize_mac(mac), host) @@ -63,6 +64,7 @@ class Sched sub_queues = [] of String default_queues.each do |queue| + # TODO: this could be high cost and should be improved in future : keys(pattern). matched_queues = @redis.keys("#{QUEUE_NAME_BASE}/sched/#{queue}/*/ready") next if matched_queues.empty? -- 2.23.0
1 0
0 0
[PATCH compass-ci 1/2] scheduler: find_job_boot: remove useless functions
by Ren Wen 29 Dec '20

29 Dec '20
- find_job : used to find job according to testbox. Now use sub-queues. - get_idle_job : used to find and submit idle jobs. Now use idle-queues. Signed-off-by: Ren Wen <15991987063(a)163.com> --- src/scheduler/find_job_boot.cr | 38 ---------------------------------- 1 file changed, 38 deletions(-) diff --git a/src/scheduler/find_job_boot.cr b/src/scheduler/find_job_boot.cr index 21396c4..f60c0e5 100644 --- a/src/scheduler/find_job_boot.cr +++ b/src/scheduler/find_job_boot.cr @@ -183,32 +183,6 @@ class Sched end end - private def find_job(testbox : String, count = 1) - tbox_group = JobHelper.match_tbox_group(testbox) - tbox = tbox_group.partition("--")[0] - - queue_list = query_consumable_keys(tbox) - - boxes = ["sched/" + testbox, - "sched/" + tbox_group, - "sched/" + tbox, - "sched/" + tbox_group + "/idle"] - boxes.each do |box| - next if queue_list.select(box).size == 0 - count.times do - job = prepare_job(box, testbox) - return job if job - - sleep(1) unless count == 1 - end - end - - # when find no job, auto submit idle job at background - spawn { auto_submit_idle_job(tbox_group) } - - return nil - end - private def prepare_job(queue_name, testbox) response = @task_queue.consume_task(queue_name) job_id = JSON.parse(response[1].to_json)["id"] if response[0] == 200 @@ -232,18 +206,6 @@ class Sched return job end - private def get_idle_job(tbox_group, testbox) - job = prepare_job("sched/#{tbox_group}/idle", testbox) - - # if there has no idle job, auto submit and get 1 - if job.nil? - auto_submit_idle_job(tbox_group) - job = prepare_job("sched/#{tbox_group}/idle", testbox) - end - - return job - end - private def auto_submit_idle_job(testbox) full_path_patterns = "#{CCI_REPOS}/#{LAB_REPO}/allot/idle/#{testbox}/*.yaml" fields = ["testbox=#{testbox}", "subqueue=idle"] -- 2.23.0
1 0
0 0
[PATCH v3 compass-ci] lib/params_group.rb optimize filter_groups()
by Lu Weitao 29 Dec '20

29 Dec '20
[Why] 1. We had for-each key-value in hash, needn't query value by key once more. 2. When remove empty value of Hash(String, Hash(String, Array(job)), remove the first level key-value at first. Signed-off-by: Lu Weitao <luweitaobe(a)163.com> --- lib/params_group.rb | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/params_group.rb b/lib/params_group.rb index 7bb7f5b..9be2cbe 100644 --- a/lib/params_group.rb +++ b/lib/params_group.rb @@ -72,10 +72,11 @@ end def filter_groups(groups) groups.each do |group_key, value| - value.each_key do |dim_key| - value.delete(dim_key) if value[dim_key].empty? + if value.empty? + groups.delete(group_key) + next end - groups.delete(group_key) if groups[group_key].empty? + value.delete_if { |_dim_key, job_list| job_list.empty? } end end -- 2.23.0
1 0
0 0
[PATCH compass-ci] sbin/auto_submit: take the pkgbuild_repo if it is linux
by Lin Jiaxin 29 Dec '20

29 Dec '20
Signed-off-by: Lin Jiaxin <ljx.joe(a)qq.com> --- sbin/auto_submit | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbin/auto_submit b/sbin/auto_submit index f96be8c..51ecc6a 100755 --- a/sbin/auto_submit +++ b/sbin/auto_submit @@ -20,7 +20,7 @@ class AutoSubmit def get_pkgbuild_repo(repo_array) pkgbuild_repo = "archlinux/#{repo_array[0]}" repo_array.each do |repo| - next unless repo =~ /-git$/ + next unless repo =~ /(-git|linux)$/ pkgbuild_repo = "archlinux/#{repo}" break -- 2.23.0
2 1
0 0
[PATCH lkp-tests] auto_submit: add build-linux.yaml for linux
by Lin Jiaxin 29 Dec '20

29 Dec '20
Signed-off-by: Lin Jiaxin <ljx.joe(a)qq.com> --- jobs/build-linux.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 jobs/build-linux.yaml diff --git a/jobs/build-linux.yaml b/jobs/build-linux.yaml new file mode 100644 index 00000000..080a84b3 --- /dev/null +++ b/jobs/build-linux.yaml @@ -0,0 +1,4 @@ +suite: build-pkg +category: functional +build-pkg: +config: <%= 'aarch64-randconfig-'+SecureRandom.random_number(1000).to_s %> -- 2.23.0
2 1
0 0
[PATCH compass-ci] lib/es_query.rb: get multi-field aggregation count result
by Lu Kaiyi 29 Dec '20

29 Dec '20
[why] get multi-field aggregation count for overview of report page. [example] es = ESQuery.new query_items = { 'os' => 'openeuler' } field1 = 'suite' field2 = 'job_state' es.query_fields(field1, field2, query_items) [input]: field1, field2, query_items (optional for query_items, default no scope limitation) [output]: {"build-pkg"=> {"failed"=>3804, "finished"=>800, "incomplete"=>196, "submit"=>136, "OOM"=>11}, "cci-depends"=>{"finished"=>580, "failed"=>218, "incomplete"=>1}, "iperf"=>{"finished"=>136, "failed"=>57}, "spinlock"=>{"finished"=>52, "failed"=>1}, ... } Signed-off-by: Lu Kaiyi <2392863668(a)qq.com> --- lib/es_query.rb | 59 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/lib/es_query.rb b/lib/es_query.rb index 88a420f..d881f6b 100644 --- a/lib/es_query.rb +++ b/lib/es_query.rb @@ -87,6 +87,52 @@ class ESQuery result end + + # select field1, field2, count(*) from index where query_items group by field1, field2 + # example: + # es = ESQuery.new + # query_items = { + # 'os' => 'openeuler' + # } + # field1 = 'suite' + # field2 = 'job_state' + # es.query_fields(field1, field2, query_items) + # input: + # field1, field2, query_items + # (optional for query_items, default no scope limitation) + # output: + # {"build-pkg"=> + # {"failed"=>3804, "finished"=>800, "incomplete"=>196, "submit"=>136, "OOM"=>11}, + # "cci-depends"=>{"finished"=>580, "failed"=>218, "incomplete"=>1}, + # "iperf"=>{"finished"=>136, "failed"=>57}, + # "spinlock"=>{"finished"=>52, "failed"=>1}, + # ... 
+ # } + def query_fields(field1, field2, query_items = {}) + query = { + query: { + bool: { + must: build_multi_field_subquery_body(query_items) + } + }, + aggs: { + "all_#{field1}" => { + terms: { field: field1, size: 1000 }, + aggs: { + "all_#{field2}" => { + terms: { field: field2, size: 1000 } + } + } + } + }, + size: 0 + } + es_result = @client.search(index: @index + '*', body: query)['aggregations']["all_#{field1}"]['buckets'] + es_result.sort_by! { |h| h['doc_count'] } + es_result.reverse! + + parse_fields(es_result, field2) + end end # Range Query Example: @@ -146,3 +192,16 @@ def assign_desc_body(keyword) }] } end + +def parse_fields(es_result, field) + result_hash = {} + es_result.each do |result| + key = result['key'] + all_field = result["all_#{field}"]['buckets'] + field_count = {} + all_field.map { |item| field_count[item['key']] = item['doc_count'] } + result_hash[key] = field_count unless field_count.empty? + end + + result_hash +end -- 2.23.0
2 1
0 0
  • ← Newer
  • 1
  • ...
  • 239
  • 240
  • 241
  • 242
  • 243
  • 244
  • 245
  • ...
  • 524
  • Older →

Powered by HyperKitty