mailweb.openeuler.org
Compass-ci

compass-ci@openeuler.org

  • 1 participant
  • 5235 discussions
[PATCH compass-ci 2/2] lib/scheduler_api.cr: add parameters
by Wu Zhende 03 Mar '21

Add job_state to indicate the status of a customized job. Add source to specify where the close request comes from.

Signed-off-by: Wu Zhende <wuzhende666(a)163.com>
---
 src/lib/scheduler_api.cr | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/lib/scheduler_api.cr b/src/lib/scheduler_api.cr
index b8dd514..50dc495 100644
--- a/src/lib/scheduler_api.cr
+++ b/src/lib/scheduler_api.cr
@@ -10,9 +10,11 @@ class SchedulerAPI
     @host = ENV.has_key?("SCHED_HOST") ? ENV["SCHED_HOST"] : "172.17.0.1"
   end
 
-  def close_job(job_id)
+  def close_job(job_id, job_state = nil, source=nil)
+    url = "/~lkp/cgi-bin/lkp-post-run?job_id=#{job_id}&source=#{source}"
+    url += "&job_state=#{job_state}" if job_state
     client = HTTP::Client.new(@host, port: @port)
-    response = client.get("/~lkp/cgi-bin/lkp-post-run?job_id=#{job_id}")
+    response = client.get(url)
     client.close()
     return response
   end
--
2.23.0
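A minimal caller sketch (not part of the patch; it assumes SchedulerAPI.new takes no arguments, and the job IDs are made up) showing how the extended close_job interface could be used:

    require "./scheduler_api"

    api = SchedulerAPI.new
    # Default call: job_state and source stay nil, matching the old behaviour.
    api.close_job("z9.123")
    # Customized job closed by the lifecycle service: both extras are passed,
    # so job_state is appended as an extra query parameter.
    api.close_job("z9.124", job_state: "failed", source: "lifecycle")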
[PATCH compass-ci 1/2] service/scheduler: send mq message
by Wu Zhende 03 Mar '21

When the scheduler service receives a job event, it sends a message to the MQ queue. The lifecycle service receives and processes these events.

Signed-off-by: Wu Zhende <wuzhende666(a)163.com>
---
 src/lib/sched.cr                      |  2 ++
 src/scheduler/close_job.cr            | 13 +++++++++++++
 src/scheduler/constants.cr            |  2 ++
 src/scheduler/find_job_boot.cr        | 13 +++++++++++++
 src/scheduler/update_job_parameter.cr | 11 +++++++++++
 5 files changed, 41 insertions(+)

diff --git a/src/lib/sched.cr b/src/lib/sched.cr
index 5f28de9..bec90cd 100644
--- a/src/lib/sched.cr
+++ b/src/lib/sched.cr
@@ -4,6 +4,7 @@
 require "kemal"
 require "yaml"
 
+require "./mq"
 require "./job"
 require "./web_env"
 require "./block_helper"
@@ -34,6 +35,7 @@ class Sched
     @es = Elasticsearch::Client.new
     Redis::Client.set_pool_size(1000)
     @redis = Redis::Client.instance
+    @mq = MQClient.instance
     @task_queue = TaskQueueAPI.new
     @rgc = RemoteGitClient.new
     @env = env
diff --git a/src/scheduler/close_job.cr b/src/scheduler/close_job.cr
index b3e6716..ea1dbf5 100644
--- a/src/scheduler/close_job.cr
+++ b/src/scheduler/close_job.cr
@@ -6,11 +6,14 @@ class Sched
     job_id = @env.params.query["job_id"]?
     return unless job_id
 
+    @env.set "job_id", job_id
+
     job = @redis.get_job(job_id)
 
     # update job_state
     job_state = @env.params.query["job_state"]?
     job["job_state"] = job_state if job_state
+    job["job_state"] = "complete" if job["job_state"] == "boot"
 
     response = @es.set_job_content(job)
     if response["_id"] == nil
@@ -34,5 +37,15 @@
     @log.info(%({"job_id": "#{job_id}", "job_state": "#{job_state}"}))
   rescue e
     @log.warn(e)
+  ensure
+    source = @env.params.query["source"]?
+    if source != "lifecycle"
+      mq_msg = {
+        "job_id" => @env.get?("job_id").to_s,
+        "job_state" => "close",
+        "time" => get_time
+      }
+      @mq.pushlish_confirm(JOB_MQ, mq_msg.to_json)
+    end
   end
 end
diff --git a/src/scheduler/constants.cr b/src/scheduler/constants.cr
index d7d7a9a..3fdb983 100644
--- a/src/scheduler/constants.cr
+++ b/src/scheduler/constants.cr
@@ -12,6 +12,8 @@
 JOB_ES_PORT = 9200
 JOB_ES_PORT_DEBUG = 9201
 JOB_INDEX_TYPE = "jobs/_doc"
 
+JOB_MQ = "job_mq"
+
 LAB = (ENV.has_key?("lab") ? ENV["lab"] : "nolab")
 SCHED_HOST = (ENV.has_key?("SCHED_HOST") ? ENV["SCHED_HOST"] : "172.17.0.1")
diff --git a/src/scheduler/find_job_boot.cr b/src/scheduler/find_job_boot.cr
index b7fe0e7..a876abc 100644
--- a/src/scheduler/find_job_boot.cr
+++ b/src/scheduler/find_job_boot.cr
@@ -16,6 +16,7 @@ class Sched
       host = value
     end
 
+    @env.set "testbox", host
     response = get_job_boot(host, boot_type)
     job_id = response[/tmpfs\/(.*)\/job\.cgz/, 1]?
     @log.info(%({"job_id": "#{job_id}", "job_state": "boot"})) if job_id
@@ -23,6 +24,15 @@ class Sched
     response
   rescue e
     @log.warn(e)
+  ensure
+    mq_msg = {
+      "job_id" => @env.get?("job_id").to_s,
+      "testbox" => @env.get?("testbox").to_s,
+      "deadline" => @env.get?("deadline").to_s,
+      "time" => get_time,
+      "job_state" => "boot"
+    }
+    @mq.pushlish_confirm(JOB_MQ, mq_msg.to_json)
   end
 
   # auto submit a job to collect the host information.
@@ -104,6 +114,9 @@ class Sched
 
     if job
       @es.set_job_content(job)
+      @env.set "job_id", job["id"]
+      @env.set "deadline", job["deadline"]
+      @env.set "job_state", job["job_state"]
       create_job_cpio(job.dump_to_json_any, Kemal.config.public_folder)
     else
       # for physical machines
diff --git a/src/scheduler/update_job_parameter.cr b/src/scheduler/update_job_parameter.cr
index 744274a..cc4cf87 100644
--- a/src/scheduler/update_job_parameter.cr
+++ b/src/scheduler/update_job_parameter.cr
@@ -8,6 +8,8 @@ class Sched
       return false
     end
 
+    @env.set "job_id", job_id
+
     # try to get report value and then update it
     job_content = {} of String => String
     job_content["id"] = job_id
@@ -30,7 +32,16 @@ class Sched
     log = job_content.dup
     log["job_id"] = log.delete("id").not_nil!
     @log.info(log.to_json)
+
+    @env.set "job_state", job_content["job_state"]?
   rescue e
     @log.warn(e)
+  ensure
+    mq_msg = {
+      "job_id" => @env.get?("job_id").to_s,
+      "job_state" => (@env.get?("job_state") || "update").to_s,
+      "time" => get_time
+    }
+    @mq.pushlish_confirm(JOB_MQ, mq_msg.to_json)
   end
 end
--
2.23.0
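For reference, a standalone sketch of the JSON payloads these ensure blocks publish to the "job_mq" queue (JOB_MQ); all field values below are made up for illustration:

    require "json"

    # Shape of the message published from find_job_boot ("boot" event).
    boot_msg = {
      "job_id"    => "z9.123",
      "testbox"   => "vm-2p8g.example-1",
      "deadline"  => "2021-03-03T12:05:00+0800",
      "time"      => "2021-03-03T11:00:00+0800",
      "job_state" => "boot"
    }

    # Shape of the message published from close_job; it is skipped when the
    # request already comes from the lifecycle service (source=lifecycle).
    close_msg = {
      "job_id"    => "z9.123",
      "job_state" => "close",
      "time"      => "2021-03-03T12:00:00+0800"
    }

    # The scheduler serializes each hash with to_json before handing it to
    # MQClient (the pushlish_confirm call shown in the patch).
    puts boot_msg.to_json
    puts close_msg.to_json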
[PATCH compass-ci] doc/manual: add document for submit container
by Luan Shengde 03 Mar '21

Signed-off-by: Luan Shengde <shdluan(a)163.com>
---
 doc/manual/build-lkp-test-container.en.md | 57 +++++++++++++++++++++++
 1 file changed, 57 insertions(+)
 create mode 100644 doc/manual/build-lkp-test-container.en.md

diff --git a/doc/manual/build-lkp-test-container.en.md b/doc/manual/build-lkp-test-container.en.md
new file mode 100644
index 0000000..b94fcc3
--- /dev/null
+++ b/doc/manual/build-lkp-test-container.en.md
@@ -0,0 +1,57 @@
+# Preface
+
+We provide a docker container to suit various Linux OSes.
+In this case you do not need to install lkp-tests on your own server.
+You can also avoid installation failures caused by undesired dependency packages.
+
+# Prepare
+
+- install docker
+- apply an account and config the default yaml
+- generate ssh keys
+
+# build submit container
+
+## 1. download resource
+
+    Use the following commands to download lkp-tests and compass-ci:
+
+    git clone https://gitee.com/wu_fengguang/compass-ci.git
+    git clone https://gitee.com/wu_fengguang/lkp-tests.git
+
+## 2. setup environment variables
+
+    Command:
+
+    echo "export LKP_SRC=$PWD/lkp-tests" >> ~/.${SHELL##*/}rc
+    echo "export CCI_SRC=$PWD/compass-ci" >> ~/.${SHELL##*/}rc
+    source ~/.${SHELL##*/}rc
+
+## 3. build submit image
+
+    Command:
+
+    cd compass-ci/container/submit
+    ./build
+
+## 4. add executable file
+
+    Command:
+
+    ln -s $CCI_SRC/container/submit/submit /usr/bin/submit
+
+# try it
+
+    instruction:
+
+    You can directly use the command 'submit' to submit jobs.
+    It works the same as if you installed lkp-tests on your own server.
+    It will start a disposable container to submit your job.
+
+    Example:
+
+    submit -c -m testbox=vm-2p8g borrow-1h.yaml
+
+    Command helps:
+
+    For detailed usage of the submit command, refer to: [submit user manual](https://gitee.com/wu_fengguang/compass-ci/blob/master/doc/manual/su…
--
2.23.0
[PATCH compass-ci 1/4] sparrow/install-client: add deploy compass-ci client script
by Liu Yinsi 03 Mar '21

Deploy the compass-ci client. It provides the functions to submit jobs to the compass-ci server and to boot testboxes.

Signed-off-by: Liu Yinsi <liuyinsi(a)163.com>
---
 sparrow/install-client | 71 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 71 insertions(+)
 create mode 100755 sparrow/install-client

diff --git a/sparrow/install-client b/sparrow/install-client
new file mode 100755
index 0000000..8d69267
--- /dev/null
+++ b/sparrow/install-client
@@ -0,0 +1,71 @@
+#!/bin/sh
+# SPDX-License-Identifier: MulanPSL-2.0+
+# Copyright (c) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+
+[[ $CCI_SRC ]] || export CCI_SRC=$(cd $(dirname $(realpath $0)); git rev-parse --show-toplevel)
+
+. $CCI_SRC/lib/log.sh
+
+check_server_ip()
+{
+    [ -z "$server_ip" ] && . 0-package/read-config
+    [ -z "$server_ip" ] && {
+        log_error "env \$server_ip is not exists, please config $CCI_SRC/sparrow/setup.yaml"
+        exit 1
+    }
+}
+
+install_env()
+{
+    cd /c/compass-ci/sparrow || return
+    check_server_ip
+    0-package/install
+    1-storage/tiny
+    5-build/ipxe &
+    1-storage/permission
+    2-network/br0
+    2-network/iptables
+    3-code/git
+    3-code/dev-env
+    . /etc/profile.d/compass.sh
+    5-build/register-account
+}
+
+boot_ipxe()
+{
+    SCHED_HOST=$server_ip
+    sed -i "s%172.17.0.1%${SCHED_HOST}%g" /tftpboot/boot.ipxe
+    sed -i "s%3000%${SCHED_PORT:-3000}%g" /tftpboot/boot.ipxe
+}
+
+run_service()
+{
+    (
+        cd $CCI_SRC/container/dnsmasq || return
+        ./build
+        ./start
+        boot_ipxe
+    )&
+    (
+        cd $CCI_SRC/container/qemu-efi || return
+        ./build
+        ./install
+    )&
+    (
+        cd $CCI_SRC/container/fluentd-base || return
+        ./build
+
+        cd $CCI_SRC/container/sub-fluentd || return
+        ./build
+        ./start
+    )&
+}
+
+main()
+{
+    install_env
+    run_service
+}
+
+main
+wait
--
2.23.0
[PATCH compass-ci] service/scheduler: set lifecycle info when boot
by Wu Zhende 03 Mar '21

When a machine calls the boot API, it indicates that the machine is alive, whether or not a job is retrieved. Therefore, we need to update the testbox information stored in ES. If a job is obtained, also set its deadline.

Signed-off-by: Wu Zhende <wuzhende666(a)163.com>
---
 src/lib/job.cr                 |  8 ++++++++
 src/lib/sched.cr               | 37 +++++++++++++++++++---------------
 src/scheduler/find_job_boot.cr |  3 ++-
 3 files changed, 31 insertions(+), 17 deletions(-)

diff --git a/src/lib/job.cr b/src/lib/job.cr
index 63ec8fb..f4ca5de 100644
--- a/src/lib/job.cr
+++ b/src/lib/job.cr
@@ -242,6 +242,14 @@ class Job
     self["rootfs"] = "#{os}-#{os_version}-#{os_arch}"
   end
 
+  def get_deadline
+    runtime = (self["timeout"]? || self["runtime"]?).to_s
+    runtime = 1800 if runtime.empty?
+
+    # reserve 300 seconds for system startup, hw machine will need such long time
+    (Time.local + (runtime.to_i32 * 2 + 300).second).to_s("%Y-%m-%dT%H:%M:%S+0800")
+  end
+
   def set_result_root
     update_tbox_group_from_testbox # id must exists, need update tbox_group
     self["result_root"] = File.join("/result/#{suite}/#{submit_date}/#{tbox_group}/#{rootfs}", "#{pp_params}", "#{id}")
diff --git a/src/lib/sched.cr b/src/lib/sched.cr
index dfec365..5f28de9 100644
--- a/src/lib/sched.cr
+++ b/src/lib/sched.cr
@@ -103,12 +103,15 @@ class Sched
     @log.warn(e)
   end
 
+  def get_time
+    Time.local.to_s("%Y-%m-%dT%H:%M:%S+0800")
+  end
+
   def update_tbox_wtmp
     testbox = ""
     hash = Hash(String, String | Nil).new
 
-    time = Time.local.to_s("%Y-%m-%d %H:%M:%S")
-    hash["time"] = time
+    hash["time"] = get_time
 
     %w(mac ip job_id tbox_name tbox_state).each do |parameter|
       if (value = @env.params.query[parameter]?)
@@ -117,7 +120,6 @@ class Sched
         testbox = value
       when "tbox_state"
         hash["state"] = value
-        hash["deadline"] = nil if value == "rebooting"
       when "mac"
         hash["mac"] = normalize_mac(value)
       else
@@ -136,23 +138,26 @@ class Sched
     @log.warn(e)
   end
 
-  def set_tbox_boot_wtmp(job : Job)
-    time = Time.local
-    booting_time = time.to_s("%Y-%m-%dT%H:%M:%S")
-
-    runtime = (job["timeout"]? || job["runtime"]?).to_s
-    runtime = 1800 if runtime.empty?
+  def set_lifecycle(job, testbox)
+    if job
+      deadline = job.get_deadline
+      job["deadline"] = deadline
+      job["job_state"] = "boot"
+      state = "booting"
+      job_id = job["id"]
+    else
+      deadline = nil
+      job_id = ""
+      state = "requesting"
+    end
 
-    # reserve 300 seconds for system startup, hw machine will need such long time
-    deadline = (time + (runtime.to_i32 * 2 + 300).second).to_s("%Y-%m-%dT%H:%M:%S")
     hash = {
-      "job_id" => job["id"],
-      "state" => "booting",
-      "booting_time" => booting_time,
+      "job_id" => job_id,
+      "state" => state,
+      "time" => get_time,
       "deadline" => deadline
     }
-
-    @es.update_tbox(job["testbox"], hash)
+    @es.update_tbox(testbox.to_s, hash)
   end
 
   def report_ssh_port
diff --git a/src/scheduler/find_job_boot.cr b/src/scheduler/find_job_boot.cr
index 3323d4d..b7fe0e7 100644
--- a/src/scheduler/find_job_boot.cr
+++ b/src/scheduler/find_job_boot.cr
@@ -100,10 +100,11 @@ class Sched
   def get_job_boot(host, boot_type)
     queues = get_queues(host)
     job = get_job_from_queues(queues, host)
+    set_lifecycle(job, host)
 
     if job
+      @es.set_job_content(job)
       create_job_cpio(job.dump_to_json_any, Kemal.config.public_folder)
-      set_tbox_boot_wtmp(job)
     else
       # for physical machines
       spawn { auto_submit_idle_job(host) }
--
2.23.0
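A quick, self-contained check of the get_deadline arithmetic introduced above (the 1800-second default and the 300-second startup reserve come from the patch; the printed value depends on when you run it):

    # deadline = now + runtime * 2 + 300 seconds; with the default runtime of
    # 1800s that is 3900s, i.e. 65 minutes from now.
    runtime = 1800
    deadline = Time.local + (runtime * 2 + 300).seconds
    puts deadline.to_s("%Y-%m-%dT%H:%M:%S+0800")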
[PATCH compass-ci] lib: parse mail list from yaml by type as to and bcc mail list
by Cao Xueliang 03 Mar '21

Yaml file defines:

    delimiter:
      to: mail1, mail3
      bcc: mail2, mail3

The mail_bisect_result calls parse_mail_list('delimiter') to get the delimiter mail list.

Signed-off-by: Cao Xueliang <caoxl78320(a)163.com>
---
 lib/mail_bisect_result.rb | 14 +++++++++++---
 lib/mail_list.yaml        |  3 +++
 lib/parse_mail_list.rb    | 13 +++++++++++++
 3 files changed, 27 insertions(+), 3 deletions(-)
 create mode 100644 lib/mail_list.yaml
 create mode 100644 lib/parse_mail_list.rb

diff --git a/lib/mail_bisect_result.rb b/lib/mail_bisect_result.rb
index 104f098..23a8ade 100644
--- a/lib/mail_bisect_result.rb
+++ b/lib/mail_bisect_result.rb
@@ -6,6 +6,7 @@ require_relative 'git'
 require_relative 'es_query'
 require_relative 'constants'
 require_relative 'mail_client'
+require_relative 'parse_mail_list'
 require_relative 'assign_account_client'
 
 # compose and send email for bisect result
@@ -22,10 +23,17 @@
     @git_commit = GitCommit.new(@work_dir, @commit_id)
     @to = @git_commit.author_email
     # now send mail to review
-    @bcc = 'caoxl(a)crystal.ci, caoxl78320(a)163.com, huming15(a)163.com, wfg(a)mail.ustc.edu.cn'
+    @bcc = 'caoxl78320(a)163.com, huming15(a)163.com, wfg(a)mail.ustc.edu.cn'
+  end
+
+  def parse_mail_info
+    mail_hash = parse_mail_list('delimiter')
+    @to = mail_hash['to'] if mail_hash.key?('to')
+    @bcc = mail_hash['bcc'] if mail_hash.key?('bcc')
   end
 
   def create_send_email
+    parse_mail_info
     send_report_mail(compose_mail)
     send_account_mail
     rm_work_dir
@@ -35,7 +43,7 @@
     subject = "[Compass-CI][#{@repo.split('/')[1]}] #{@commit_id[0..9]} #{@bisect_error[0].split("\n")[0]}"
     prefix_srv = "http://#{SRV_HTTP_DOMAIN}:#{SRV_HTTP_PORT}"
     bisect_job_url = ENV['result_root'] ? "bisect job result directory:\n#{prefix_srv}#{ENV['result_root']}\n" : ''
-    bisect_report_doc = "bisect email doc:\nhttps://gitee.com/wu_fengguang/compass-ci/blob/master/doc/bisect_emai…"
+    report_doc = "bisect email doc:\nhttps://gitee.com/wu_fengguang/compass-ci/blob/master/doc/bisect_emai…"
     pkgbuild_repo_url = "PKGBUILD:\n#{prefix_srv}/git/#{@pkgbuild_repo}\n"
     first_bad_commit_job_url = "first bad commit job result directory:\n#{prefix_srv}#{@first_bad_commit_result_root}\n"
 
@@ -58,7 +66,7 @@
       #{pkgbuild_repo_url}
       #{first_bad_commit_job_url}
       #{bisect_job_url}
-      #{bisect_report_doc}
+      #{report_doc}
       Regards,
       Compass CI team
     BODY
diff --git a/lib/mail_list.yaml b/lib/mail_list.yaml
new file mode 100644
index 0000000..d982c93
--- /dev/null
+++ b/lib/mail_list.yaml
@@ -0,0 +1,3 @@
+delimiter:
+  to: caoxl78320(a)163.com
+  bcc: caoxl78320(a)163.com, wfg(a)mail.ustc.edu.cn, ljx.joe(a)qq.com, huming15(a)163.com
diff --git a/lib/parse_mail_list.rb b/lib/parse_mail_list.rb
new file mode 100644
index 0000000..49b5c88
--- /dev/null
+++ b/lib/parse_mail_list.rb
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: MulanPSL-2.0+
+# frozen_string_literal: true
+
+require 'yaml'
+
+def parse_mail_list(type)
+  content = {}
+  mail_list_yaml = './mail_list.yaml'
+  content = YAML.safe_load(File.open(mail_list_yaml)) if FileTest.exists?(mail_list_yaml)
+  return {} unless content[type]
+
+  return content[type]
+end
--
2.23.0
[PATCH compass-ci] src/stats_worker:delete filter error_id and add log
by Cao Xueliang 02 Mar '21

We added the error_id filter in commit 1c154d888fab1fce4623663b4a88b9a1630171c2. We don't need it now, so delete the filter and add a new_error_id log entry for statistics.

Signed-off-by: Cao Xueliang <caoxl78320(a)163.com>
---
 src/extract-stats/stats_worker.cr | 17 ++++++-----------
 1 file changed, 6 insertions(+), 11 deletions(-)

diff --git a/src/extract-stats/stats_worker.cr b/src/extract-stats/stats_worker.cr
index 01bae21..836d1f8 100644
--- a/src/extract-stats/stats_worker.cr
+++ b/src/extract-stats/stats_worker.cr
@@ -82,25 +82,20 @@ class StatsWorker
       }
     )
-    error_id = select_error_id(check_new_error_ids(error_ids, job_id))
-    if error_id
+    new_error_ids = check_new_error_ids(error_ids, job_id)
+    unless new_error_ids.empty?
+      sample_error_id = new_error_ids.sample
       STDOUT.puts "send a delimiter task: job_id is #{job_id}"
-      @tq.add_task(DELIMITER_TASK_QUEUE, JSON.parse({"error_id" => error_id,
+      @tq.add_task(DELIMITER_TASK_QUEUE, JSON.parse({"error_id" => sample_error_id,
                                                      "job_id" => job_id,
                                                      "lab" => LAB}.to_json))
+      msg = %({"job_id": "#{job_id}", "new_error_id": "#{sample_error_id}"})
+      system "echo '#{msg}'"
     end
 
     msg = %({"job_id": "#{job_id}", "job_state": "extract_finished"})
     system "echo '#{msg}'"
   end
 
-  def select_error_id(new_error_ids : Array)
-    new_error_ids.each do |error_id|
-      return error_id if /(cpp|c|h):(warning|error)/i =~ error_id
-    end
-
-    return nil
-  end
-
   def check_new_error_ids(error_ids : Array, job_id : String)
     new_error_ids = [] of String
     error_ids.each do |error_id|
--
2.23.0
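A small illustration of the new selection logic (the error_id values are made up): any new error_id can now trigger a delimiter task, chosen at random with Array#sample, instead of only ids matching the old /(cpp|c|h):(warning|error)/i filter:

    new_error_ids = ["stderr.connection-refused", "gcc.warning.unused-variable"]
    unless new_error_ids.empty?
      sample_error_id = new_error_ids.sample
      # This mirrors the log line added by the patch.
      puts %({"job_id": "z9.123", "new_error_id": "#{sample_error_id}"})
    end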
[PATCH v3 compass-ci 4/4] container/dracut-initrd: add 90lkp module into Dockerfile
by Xu Xijian 02 Mar '21

Signed-off-by: Xu Xijian <hdxuxijian(a)163.com>
---
 container/dracut-initrd/Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/container/dracut-initrd/Dockerfile b/container/dracut-initrd/Dockerfile
index 7f8448f..b8c551f 100644
--- a/container/dracut-initrd/Dockerfile
+++ b/container/dracut-initrd/Dockerfile
@@ -6,6 +6,8 @@
 FROM debian
 MAINTAINER Wu Fenguang <wfg(a)mail.ustc.edu.cn>
 ADD bin /usr/local/bin
+ADD modules.d /usr/lib/modules.d
+COPY modules.d/90lkp /usr/lib/dracut/modules.d/90lkp
 COPY conf/sources.list* /etc/apt/
 COPY conf/add-modules.conf /etc/dracut.conf.d/
 WORKDIR /usr/local/bin
--
2.23.0
[PATCH v3 compass-ci 2/4] container/dracut-initrd: README of module 90lkp
by Xu Xijian 02 Mar '21

Signed-off-by: Xu Xijian <hdxuxijian(a)163.com>
---
 container/dracut-initrd/modules.d/90lkp/README | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 container/dracut-initrd/modules.d/90lkp/README

diff --git a/container/dracut-initrd/modules.d/90lkp/README b/container/dracut-initrd/modules.d/90lkp/README
new file mode 100644
index 0000000..7d7a205
--- /dev/null
+++ b/container/dracut-initrd/modules.d/90lkp/README
@@ -0,0 +1,5 @@
+dracut lkp module
+
+This module deploys the lkp module into the rootfs.
+lkp (Linux Kernel Performance) is a test framework, which is used
+to run self-defined tests on the OS kernel.
--
2.23.0
[PATCH v3 compass-ci 1/4] container/dracut-initrd: add module 90lkp/module-setup.sh
by Xu Xijian 02 Mar '21

The 90lkp module is a dracut module that deploys lkp into the rootfs. module-setup.sh is the base setup script every dracut module needs.

Signed-off-by: Xu Xijian <hdxuxijian(a)163.com>
---
 .../modules.d/90lkp/module-setup.sh | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100755 container/dracut-initrd/modules.d/90lkp/module-setup.sh

diff --git a/container/dracut-initrd/modules.d/90lkp/module-setup.sh b/container/dracut-initrd/modules.d/90lkp/module-setup.sh
new file mode 100755
index 0000000..c188600
--- /dev/null
+++ b/container/dracut-initrd/modules.d/90lkp/module-setup.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+check() {
+    return 0
+}
+
+depends() {
+    # We do not depend on any modules - just some root
+    return 0
+}
+
+# called by dracut
+installkernel() {
+    return 0
+}
+
+install() {
+    inst_hook pre-pivot 10 "$moddir/lkp-deploy.sh"
+}
--
2.23.0