
Merge branch 'openstack-os-build' into release/essex-hack/openstack-os-build
commit 4c7aca0766db32d1d006b44cd7edb0cbfdd12dad (2 parents: 49cbb07 + 0d109a7)
Authored by @VictorLowther
dev (30 changes)
@@ -80,9 +80,9 @@ trap - 0 INT QUIT TERM
if [[ $DEV_GITHUB_PASSWD ]]; then
debug "Migrating Github password information to $HOME/.netrc"
for mach in github.com api.github.com; do
- grep -q "^$mach" "$HOME/.netrc" &>/dev/null && continue
- printf "\nmachine %s login %s password %s\n" \
- "$mach" "$DEV_GITHUB_ID" "$DEV_GITHUB_PASSWD" >> "$HOME/.netrc"
+ grep -q "^$mach" "$HOME/.netrc" &>/dev/null && continue
+ printf "\nmachine %s login %s password %s\n" \
+ "$mach" "$DEV_GITHUB_ID" "$DEV_GITHUB_PASSWD" >> "$HOME/.netrc"
done
chmod 600 "$HOME/.netrc"
sed -ie 's/DEV_GITHUB_PASSWD=.*//' "$HOME/.build-crowbar.conf"
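
For reference, a minimal sketch of the entry format this loop appends for each host; the login and password values below are placeholders, not anything taken from this commit:

    # illustrative only: jdoe/hunter2 stand in for $DEV_GITHUB_ID/$DEV_GITHUB_PASSWD
    printf "\nmachine %s login %s password %s\n" github.com jdoe hunter2 >> "$HOME/.netrc"
    # resulting entry: machine github.com login jdoe password hunter2
    chmod 600 "$HOME/.netrc"    # the script tightens permissions the same way afterwards
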
@@ -489,7 +489,7 @@ scrub_merged_pull_requests() {
git_config_has remote.personal.url || return
while read ref br; do
case $br in
- refs/heads/*)
+ refs/heads/*)
ref=${br#refs/heads/}
heads[${ref//\//-}]+="$br ";;
refs/remotes/personal/pull-req-*)
@@ -707,7 +707,7 @@ setup() {
die "Crowbar repo must be clean before trying to set things up!"
# Make sure we have Github login credentials as the very first thing.
[[ $DEV_GITHUB_ID ]] || {
- local DEV_GITHUB_PASSWD
+ local DEV_GITHUB_PASSWD
read -p "Enter your Github username: " DEV_GITHUB_ID
curl_and_res -f \
"https://api.github.com/users/$DEV_GITHUB_ID" &>/dev/null || \
@@ -724,12 +724,12 @@ setup() {
echo "Unable to authenticate as Github user $DEV_GITHUB_ID." >&2
die "Please try again when you have Github access."
}
- for mach in github.com api.github.com; do
- grep -q "^$mach" "$HOME/.netrc" &>/dev/null && continue
- printf "\nmachine %s login %s password %s\n" \
- "$mach" "$DEV_GITHUB_ID" "$DEV_GITHUB_PASSWD" >> "$HOME/.netrc"
- done
- chmod 600 "$HOME/.netrc"
+ for mach in github.com api.github.com; do
+ grep -q "^$mach" "$HOME/.netrc" &>/dev/null && continue
+ printf "\nmachine %s login %s password %s\n" \
+ "$mach" "$DEV_GITHUB_ID" "$DEV_GITHUB_PASSWD" >> "$HOME/.netrc"
+ done
+ chmod 600 "$HOME/.netrc"
printf "DEV_GITHUB_ID=%q\n" "$DEV_GITHUB_ID" >> "$HOME/.build-crowbar.conf"
}
# We have a baked-in assumption that Github is our origin, and that
@@ -1294,7 +1294,7 @@ ripple_changes_out() {
# repository, and then update the branches for the current release according
# to the branch structure defined in DEV_BRANCHES.
sync_everything() {
- local unsynced_barclamps=()
+ local unsynced_barclamps=()
local b u head res=0 ref branch rel allow_remote_rebase=true
crowbar_is_clean || exit 1
# Do barclamps first.
@@ -1385,13 +1385,13 @@ Command line options:
You can run a fetch at any time without disturbing your working
code, provided you have network connectivity.
If you pass the name of a remote, fetch will try to fetch changes
- from that remote in the main Crowbar repository and in each of the
+ from that remote in the main Crowbar repository and in each of the
barclamps
remote -- Manage remotes across all dellcloudedge repositories. It has the
following subcommands:
add <remote> <urlbase> : The add subcommand adds a new remote to
- the Crowbar repositories.
+ the Crowbar repositories.
rm <remote> : The rm subcommand removes an already-existing remote
from the Crowbar repositories.
set-url <remote> <urlbase> : The set-url subcommand allows you to
@@ -1746,7 +1746,7 @@ pull_requests_gen() {
[[ ${br_pulls["$br"]} ]] || continue
in_repo git_push personal "${br}:${br_pulls[$br]}"
local br_head="$DEV_GITHUB_ID:${br_pulls[$br]}"
- if [[ $br = feature/* ]]; then
+ if [[ $br = feature/* ]]; then
br="$(best_parent_prefix)${br##*/}" || exit 1
fi
do_pull_request \
extra/barclamp_install.rb (75 changes)
@@ -19,15 +19,47 @@
# the 1st choice is to use the code from the framework since it is most up to date
# however, that code is not always available when installing
require '/opt/dell/bin/barclamp_mgmt_lib.rb'
+require 'getoptlong'
+require 'pp'
-tmpdir="/tmp/bc_install-#{Process.pid}-#{Kernel.rand(65535)}"
-Dir.mkdir(tmpdir)
+opts = GetoptLong.new(
+ [ '--help', '-h', GetoptLong::NO_ARGUMENT ],
+ [ '--debug', '-d', GetoptLong::NO_ARGUMENT ],
+ [ '--force', '-f', GetoptLong::NO_ARGUMENT ]
+)
+
+def usage()
+ puts "Usage:"
+ puts "#{__FILE__} [--help] [--debug] /path/to/new/barclamp"
+ exit
+end
+
+debug = false
force_install = false
+
+opts.each do |opt, arg|
+ case opt
+ when "--help"
+ usage
+ when "--debug"
+ puts "DEBUG: debug mode is enabled"
+ debug = true
+ when "--force"
+ force_install = true
+ end
+end
+
+usage if ARGV.length < 1
+
+tmpdir = "/tmp/bc_install-#{Process.pid}-#{Kernel.rand(65535)}"
+puts "DEBUG: tarball tmpdir: #{tmpdir}" if debug
+Dir.mkdir(tmpdir)
candidates = Array.new
ARGV.each do |src|
+ puts "DEBUG: src: #{src}" if debug
case
- when /tar\.gz$/ =~ src
+ when /tar\.gz|tgz$/ =~ src
# This might be a barclamp tarball. Expand it into a temporary location.
src=File.expand_path(src)
system "tar xzf \"#{src}\" -C \"#{tmpdir}\""
@@ -42,16 +74,24 @@
candidates << File.expand_path(src)
when File.exists?(File.join("/opt","dell","barclamps",src,"crowbar.yml"))
candidates << File.join("/opt","dell","barclamps",src)
- when src == "--force" then force_install = true
else
puts "#{src} is not a barclamp, ignoring."
end
end
+puts "DEBUG: checking candidates: #{candidates.to_s}" if debug
+
barclamps = Hash.new
candidates.each do |bc|
# We have already verified that each of the candidates has crowbar.yml
- barclamp = YAML.load_file File.join(bc,"crowbar.yml")
+ begin
+ puts "DEBUG: trying to parse crowbar.yml" if debug
+ barclamp = YAML.load_file File.join(bc,"crowbar.yml")
+ rescue
+ puts "Exception occurred while parsing crowbar.yml in #{bc}, skipping"
+ next
+ end
+
if barclamp["barclamp"] and barclamp["barclamp"]["name"]
name = barclamp["barclamp"]["name"]
else
@@ -64,16 +104,23 @@
order = barclamp["crowbar"]["order"].to_i
end
barclamps[name] = { :src => bc, :name => name, :order => order, :yaml => barclamp }
+ puts "DEBUG: barclamp[#{name}] = #{barclamps[name].pretty_inspect}" if debug
end
+puts "DEBUG: installing barclamps:" if debug
barclamps.values.sort_by{|v| v[:order]}.each do |bc|
+ puts "DEBUG: bc = #{bc.pretty_inspect}" if debug
begin
unless /^\/opt\/dell\/barclamps\// =~ bc[:src]
target="/opt/dell/barclamps/#{bc[:src].split("/")[-1]}"
if File.directory? target
+ puts "DEBUG: target directory #{target} exists" if debug
if File.exists? "#{target}/crowbar.yml"
+ puts "DEBUG: #{target}/crowbar.yml file exists" if debug
if File.exists? "#{target}/sha1sums"
+ puts "DEBUG: #{target}/sha1sums file exists" if debug
unless force_install or system "cd \"#{target}\"; sha1sum --status -c sha1sums"
+ puts "DEBUG: force_install mode is disabled and not all file checksums match" if debug
puts "Refusing to install over non-pristine target #{target}"
puts "Please back up the following files:"
system "cd \"#{target}\"; sha1sum -c sha1sums |grep -v OK"
@@ -81,26 +128,33 @@
puts " cd \"#{target}\"; find -type f -not -name sha1sums -print0 | \\"
puts" xargs -0 sha1sum -b >sha1sums"
puts "(or use the --force switch)"
+ puts "DEBUG: temporary directory #{tmpdir} will be removed if it exists" if debug
system "rm -rf #{tmpdir}" if File.directory?(tmpdir)
exit -1
end
elsif not force_install
+ puts "DEBUG: force_install mode is disabled and #{target}/sha1sums file does not exist" if debug
puts "#{target} already exists, but it does not have checksums."
puts "Please back up any local changes you may have made, and then"
puts "create a checksums file with:"
puts " cd \"#{target}\"; find -type f -not -name sha1sums -print0 | \\"
puts" xargs -0 sha1sum -b >sha1sums"
puts "(or use the --force switch)"
+ puts "DEBUG: temporary directory #{tmpdir} will be removed if it exists" if debug
system "rm -rf #{tmpdir}" if File.directory?(tmpdir)
exit -1
end
else
+ puts "DEBUG: #{target}/crowbar.yml does not exist" if debug
puts "#{target} exists, but it is not a barclamp."
puts "Cowardly refusing to overwrite it."
+ puts "DEBUG: temporary directory #{tmpdir} will be removed if it exists" if debug
system "rm -rf #{tmpdir}" if File.directory?(tmpdir)
exit -1
end
else
+ puts "DEBUG: target directory \"#{target}\" does not exist" if debug
+ puts "DEBUG: creating directory \"#{target}\"" if debug
system "mkdir -p \"#{target}\""
end
# Only rsync over the changes if this is a different install
@@ -108,17 +162,26 @@
unless File.exists?("#{bc[:src]}/sha1sums") and \
File.exists?("#{target}/sha1sums") and \
system "/bin/bash -c 'diff -q <(sort \"#{bc[:src]}/sha1sums\") <(sort \"#{target}/sha1sums\")'"
+ puts "DEBUG: syncing \"#{bc[:src]}\" directory and \"#{target}\" directory" if debug
system "rsync -r \"#{bc[:src]}/\" \"#{target}\""
end
bc[:src] = target
end
- bc_install bc[:name],bc[:src],bc[:yaml]
+ puts "DEBUG: installing barclamp" if debug
+ begin
+ bc_install bc[:name], bc[:src], bc[:yaml], :debug => debug
+ rescue Exception => e
+ puts "DEBUG: exception occurred while installing barclamp" if debug
+ raise e
+ end
rescue
puts "Install of #{bc[:name]} failed."
+ puts "DEBUG: temporary directory #{tmpdir} will be removed if it exists" if debug
system "rm -rf #{tmpdir}" if File.directory?(tmpdir)
exit -3
end
end
+puts "DEBUG: temporary directory #{tmpdir} will be removed if it exists" if debug
system "rm -rf #{tmpdir}" if File.directory?(tmpdir)
exit 0
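
With the GetoptLong handling added above, the installer gains real --help, --debug, and --force switches (previously --force was fished out of the bare argument list). A hedged invocation sketch; the installed path and barclamp names are assumptions, not something this diff states:

    # install a barclamp tarball verbosely, overwriting an already-modified target
    /opt/dell/bin/barclamp_install.rb --debug --force /tmp/barclamp-mybc.tar.gz
    # install an already-unpacked barclamp by name from /opt/dell/barclamps
    /opt/dell/bin/barclamp_install.rb --debug mybc

The same --debug flag is threaded through bc_install into the barclamp_mgmt_lib.rb helpers changed below.
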
extra/barclamp_mgmt_lib.rb (188 changes)
@@ -21,6 +21,7 @@
require 'json'
require 'fileutils'
require 'active_support/all'
+require 'pp'
MODEL_SUBSTRING_BASE = '==BC-MODEL=='
MODEL_SUBSTRING_CAMEL = '==^BC-MODEL=='
@@ -41,37 +42,41 @@
DEBUG = ENV['DEBUG'] === "true"
# entry point for scripts
-def bc_install(bc, path, barclamp)
+def bc_install(bc, path, barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
case barclamp["crowbar"]["layout"].to_i
when 1
- puts "Installing app components" if DEBUG
- bc_install_layout_1_app bc, path, barclamp
- puts "Installing chef components" if DEBUG
- bc_install_layout_1_chef bc, path, barclamp
- puts "Installing cache components" if DEBUG
- bc_install_layout_1_cache bc, path, barclamp
+ puts "DEBUG: Installing app components" if debug
+ bc_install_layout_1_app bc, path, barclamp, :debug => debug
+ puts "DEBUG: Installing chef components" if debug
+ bc_install_layout_1_chef bc, path, barclamp, :debug => debug
+ puts "DEBUG: Installing cache components" if debug
+ bc_install_layout_1_cache bc, path, barclamp, :debug => debug
else
throw "ERROR: could not install barclamp #{bc} because #{barclamp["barclamp"]["crowbar_layout"]} is unknown layout."
end
- catalog path
+ catalog path, :debug => debug
end
# regenerate the barclamp catalog (does a complete regen each install)
-def catalog(path)
- puts "Creating catalog in #{path}" if DEBUG
+def catalog(path, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
+ puts "DEBUG: Creating catalog in #{path}" if debug
# create the groups for the catalog - for now, just groups. other catalogs may be added later
cat = { 'barclamps'=>{} }
barclamps = File.join CROWBAR_PATH, 'barclamps'
list = Dir.entries(barclamps).find_all { |e| e.end_with? '.yml'}
# scan the installed barclamps
list.each do |bc_file|
- puts "Loading #{bc_file}" if DEBUG
+ puts "Loading #{bc_file}" if debug
bc = YAML.load_file File.join(barclamps, bc_file)
name = bc['barclamp']['name']
cat['barclamps'][name] = {} if cat['barclamps'][name].nil?
description = bc['barclamp']['description']
if description.nil?
- puts "Trying to find description" if DEBUG
+ puts "Trying to find description" if debug
[ File.join(path, '..', name, 'chef', 'data_bags', 'crowbar', "bc-template-#{name}.json"), \
File.join(path, '..', "barclamp-#{name}", 'chef', 'data_bags', 'crowbar', "bc-template-#{name}.json")].each do |f|
next unless File.exist? f
@@ -81,7 +86,7 @@ def catalog(path)
end
end
# template = File.join path, name,
- puts "Adding catalog info for #{bc['barclamp']['name']}" if DEBUG
+ puts "Adding catalog info for #{bc['barclamp']['name']}" if debug
cat['barclamps'][name]['description'] = description || "No description for #{bc['barclamp']['name']}"
cat['barclamps'][name]['user_managed'] = (bc['barclamp']['user_managed'].nil? ? true : bc['barclamp']['user_managed'])
puts "#{name} #{bc['barclamp']['user_managed']}" if name === 'dell-branding'
@@ -104,26 +109,34 @@ def catalog(path)
end
# copies paths from one place to another (recursive)
-def bc_cloner(item, bc, entity, source, target, replace)
+def bc_cloner(item, bc, entity, source, target, replace, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
+ puts "DEBUG: bc_cloner method called with debug option enabled" if debug
+ puts "DEBUG: bc_cloner args: item=#{item}, bc=#{bc}, entity=#{entity}, source=#{source}, target=#{target}, replace=#{replace}" if debug
+
files = []
- new_item = (replace ? bc_replacer(item, bc, entity) : item)
+ new_item = (replace ? bc_replacer(item, bc, entity, :debug => debug) : item)
+ puts "DEBUG: new_item=#{new_item}" if debug
new_file = File.join target, new_item
+ puts "DEBUG: new_file=#{new_file}" if debug
new_source = File.join(source, item)
+ puts "DEBUG: new_source=#{new_source}" if debug
if File.directory? new_source
- puts "\tcreating directory #{new_file}." if DEBUG
+ puts "DEBUG: \tcreating directory #{new_file}." if debug
FileUtils.mkdir new_file unless File.directory? new_file
clone = Dir.entries(new_source).find_all { |e| !e.start_with? '.'}
clone.each do |recurse|
- files += bc_cloner(recurse, bc, entity, new_source, new_file, replace)
+ files += bc_cloner(recurse, bc, entity, new_source, new_file, replace, :debug => debug)
end
else
#need to inject into the file
unless replace
- puts "\t\tcopying file #{new_file}." if DEBUG
+ puts "DEBUG: \t\tcopying file #{new_file}." if debug
FileUtils.cp new_source, new_file
files << new_file
else
- puts "\t\tcreating file #{new_file}." if DEBUG
+ puts "DEBUG: \t\tcreating file #{new_file}." if debug
t = File.open(new_file, 'w')
File.open(new_source, 'r') do |f|
s = f.read
@@ -137,13 +150,15 @@ def bc_cloner(item, bc, entity, source, target, replace)
end
# fix permissions
-def chmod_dir(value, path)
+def chmod_dir(value, path, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
f = Dir.entries(path).find_all { |e| !e.start_with? '.'}
f.each do |i|
file = File.join(path,i)
if File.exists? file
FileUtils.chmod value, file
- puts "\tchmod 0#{value.to_s(8)} for #{file}" if DEBUG
+ puts "DEBUG: \tchmod 0#{value.to_s(8)} for #{file}" if debug
else
puts "WARN: missing file #{file} for chmod #{value} operation."
end
@@ -151,23 +166,31 @@ def chmod_dir(value, path)
end
# remove model placeholders
-def bc_replacer(item, bc, entity)
+def bc_replacer(item, bc, entity, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
+ puts "DEBUG: bc_replacer method called with debug option enabled" if debug
+ puts "DEBUG: bc_replacer args: item=#{item}, bc=#{bc}, entity=#{entity}" if debug
+
item = item.gsub(MODEL_SUBSTRING_BASE, bc)
item = item.gsub(MODEL_SUBSTRING_CAMEL, bc.camelize)
item = item.gsub(MODEL_SUBSTRING_HUMAN, bc.humanize)
item = item.gsub(MODEL_SUBSTRING_CAPSS, bc.capitalize)
item = item.gsub('Copyright 2011, Dell', "Copyright #{Time.now.year}, #{entity}")
+ puts "DEBUG: bc_replacer returns item=#{item}" if debug
return item
end
#merges localizations from config into the matching translation files
-def merge_i18n(barclamp)
+def merge_i18n(barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
locales = barclamp['locale_additions']
locales.each do |key, value|
#translation file (can be multiple)
f = File.join CROWBAR_PATH, 'config', 'locales', "#{key}.yml"
if File.exist? f
- puts "merging translation for #{f}" if DEBUG
+ puts "merging translation for #{f}" if debug
master = YAML.load_file f
master = merge_tree(key, value, master)
File.open( f, 'w' ) do |out|
@@ -180,8 +203,9 @@ def merge_i18n(barclamp)
end
# makes sure that sass overrides are injected into the application.sass
-def merge_sass(barclamp, bc, path, installing)
- debug = DEBUG
+def merge_sass(barclamp, bc, path, installing, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
sass_path = File.join path, 'crowbar_framework', 'public', 'stylesheets', 'sass'
application_sass = File.join CROWBAR_PATH, 'public', 'stylesheets', 'sass', 'application.sass'
if File.exist? application_sass and File.exists? sass_path
@@ -200,14 +224,14 @@ def merge_sass(barclamp, bc, path, installing)
barclamp['application_sass']['remove'].each do |item|
if installing and sapp.include? item
sapp.delete item
- puts "removing '#{item}' from application.sass based on crowbar.yml" if debug
+ puts "DEBUG: removing '#{item}' from application.sass based on crowbar.yml" if debug
elsif !installing and !sapp.include? item
if top>0
sapp.insert top, item
else
sapp << item
end
- puts "restoring '#{item}' to application.sass based on crowbar.yml in position #{top}" if debug
+ puts "DEBUG: restoring '#{item}' to application.sass based on crowbar.yml in position #{top}" if debug
end
end unless barclamp['application_sass'].nil? or barclamp['application_sass']['remove'].nil?
# scan the sass files from the barclamp
@@ -220,27 +244,29 @@ def merge_sass(barclamp, bc, path, installing)
else
sapp << entry
end
- puts "adding '#{entry}' to application.sass for #{sf} in position #{top}" if debug
+ puts "DEBUG: adding '#{entry}' to application.sass for #{sf} in position #{top}" if debug
# when uninstalling, remove from application
elsif !installing
sapp.delete entry
- puts "removing '#{entry}' from application.sass for #{sf}" if debug
+ puts "DEBUG: removing '#{entry}' from application.sass for #{sf}" if debug
end
end
# write the new application sass
File.open(application_sass, 'w' ) do |out|
out.puts sapp
end
- framework_permissions bc, path
+ framework_permissions bc, path, :debug => debug
puts "updated #{application_sass}" if debug
else
- puts "NOTE: skipping application sass update, #{application_sass} not found" if debug
+ puts "DEBUG: NOTE: skipping application sass update, #{application_sass} not found" if debug
end
end
# injects/cleans barclamp items from framework navigation
-def merge_nav(barclamp, installing)
+def merge_nav(barclamp, installing, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
unless barclamp['nav'].nil?
bc_flag = "#FROM BARCLAMP: #{barclamp['barclamp']['name']}."
# get raw file
@@ -281,7 +307,9 @@ def merge_nav(barclamp, installing)
end
# helper for localization merge
-def merge_tree(key, value, target)
+def merge_tree(key, value, target, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
if target.key? key
if target[key].class == Hash
value.each do |k, v|
@@ -289,18 +317,20 @@ def merge_tree(key, value, target)
target[key] = merge_tree(k, v, target[key])
end
else
- puts "replaced key #{key} value #{value}" if DEBUG
+ puts "DEBUG: replaced key #{key} value #{value}" if debug
target[key] = value
end
else
- puts "added key #{key} value #{value}" if DEBUG
+ puts "DEBUG: added key #{key} value #{value}" if debug
target[key] = value
end
return target
end
# cleanup (anti-install) assumes the install generates a file list
-def bc_remove_layout_1(bc, path, barclamp)
+def bc_remove_layout_1(bc, path, barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
filelist = File.join BARCLAMP_PATH, "#{bc}-filelist.txt"
if File.exist? filelist
files = [ filelist ]
@@ -310,21 +340,25 @@ def bc_remove_layout_1(bc, path, barclamp)
FileUtils.rm files
merge_nav barclamp, false
merge_sass barclamp, bc, path, false
- puts "Barclamp #{bc} UNinstalled" if DEBUG
+ puts "DEBUG: Barclamp #{bc} UNinstalled" if debug
end
end
-def framework_permissions(bc, path)
+def framework_permissions(bc, path, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
FileUtils.chmod 0755, File.join(CROWBAR_PATH, 'db')
chmod_dir 0644, File.join(CROWBAR_PATH, 'db')
FileUtils.chmod 0755, File.join(CROWBAR_PATH, 'tmp')
chmod_dir 0644, File.join(CROWBAR_PATH, 'tmp')
FileUtils.chmod_R 0755, File.join(CROWBAR_PATH, 'public', 'stylesheets')
- puts "\tcopied crowbar_framework files" if DEBUG
+ puts "DEBUG: \tcopied crowbar_framework files" if debug
end
# install the framework files for a barclamp
-def bc_install_layout_1_app(bc, path, barclamp)
+def bc_install_layout_1_app(bc, path, barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
#TODO - add a roll back so there are NOT partial results if a step fails
files = []
@@ -333,31 +367,39 @@ def bc_install_layout_1_app(bc, path, barclamp)
#copy the rails parts (required for render BEFORE import into chef)
dirs = Dir.entries(path)
+ puts "DEBUG: path entries #{dirs.pretty_inspect}" if debug
if dirs.include? 'crowbar_framework'
- files += bc_cloner('crowbar_framework', bc, nil, path, BASE_PATH, false)
- framework_permissions bc, path
+ puts "DEBUG: path entries include \"crowbar_framework\"" if debug
+ files += bc_cloner('crowbar_framework', bc, nil, path, BASE_PATH, false, :debug => debug)
+ framework_permissions bc, path, :debug => debug
end
#merge i18n information (least invasive operations first)
- merge_i18n barclamp
- merge_nav barclamp, true
- merge_sass barclamp, bc, path, true
+ puts "DEBUG: merge_i18n" if debug
+ merge_i18n barclamp, :debug => debug
+ puts "DEBUG: merge_nav" if debug
+ merge_nav barclamp, true, :debug => debug
+ puts "DEBUG: merge_sass" if debug
+ merge_sass barclamp, bc, path, true, :debug => debug
if dirs.include? 'bin'
- files += bc_cloner('bin', bc, nil, path, BASE_PATH, false)
+ puts "DEBUG: path entries include \"bin\"" if debug
+ files += bc_cloner('bin', bc, nil, path, BASE_PATH, false, :debug => debug)
FileUtils.chmod_R 0755, BIN_PATH
- puts "\tcopied command line files" if DEBUG
+ puts "DEBUG: \tcopied command line files" if debug
end
if dirs.include? 'updates'
- files += bc_cloner('updates', bc, nil, path, ROOT_PATH, false)
+ puts "DEBUG: path entries include \"updates\"" if debug
+ files += bc_cloner('updates', bc, nil, path, ROOT_PATH, false, :debug => debug)
FileUtils.chmod_R 0755, UPDATE_PATH
- puts "\tcopied updates files" if DEBUG
+ puts "DEBUG: \tcopied updates files" if debug
end
# copy all the files to the target
if dirs.include? 'chef'
- files += bc_cloner('chef', bc, nil, path, BASE_PATH, false)
- puts "\tcopied over chef parts from #{path} to #{BASE_PATH}" if DEBUG
+ puts "DEBUG: path entries include \"chef\"" if debug
+ files += bc_cloner('chef', bc, nil, path, BASE_PATH, false, :debug => debug)
+ puts "DEBUG: \tcopied over chef parts from #{path} to #{BASE_PATH}" if debug
end
# Migrate base crowbar schema if needed
@@ -394,17 +436,19 @@ def bc_install_layout_1_app(bc, path, barclamp)
FileUtils.mkdir yml_path unless File.directory? yml_path
FileUtils.cp yml_barclamp, File.join(yml_path, "#{bc}.yml")
- puts "Barclamp #{bc} (format v1) added to Crowbar Framework. Review #{filelist} for files created." if DEBUG
+ puts "DEBUG: Barclamp #{bc} (format v1) added to Crowbar Framework. Review #{filelist} for files created." if debug
end
# upload the chef parts for a barclamp
-def bc_install_layout_1_chef(bc, path, barclamp)
+def bc_install_layout_1_chef(bc, path, barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
log_path = File.join '/var', 'log', 'barclamps'
FileUtils.mkdir log_path unless File.directory? log_path
log = File.join log_path, "#{bc}.log"
system "date >> #{log}"
- puts "Capturing chef install logs to #{log}" if DEBUG
+ puts "DEBUG: Capturing chef install logs to #{log}" if debug
chef = File.join path, 'chef'
cookbooks = File.join chef, 'cookbooks'
databags = File.join chef, 'data_bags'
@@ -418,9 +462,9 @@ def bc_install_layout_1_chef(bc, path, barclamp)
puts "\t#{path} #{knife_cookbook} upload failed. Examine #{log} for more info"
exit 1
end
- puts "\texecuted: #{path} #{knife_cookbook}" if DEBUG
+ puts "DEBUG: \texecuted: #{path} #{knife_cookbook}" if debug
else
- puts "\tNOTE: could not find cookbooks #{cookbooks}" if DEBUG
+ puts "DEBUG: \tNOTE: could not find cookbooks #{cookbooks}" if debug
end
#upload the databags
@@ -436,7 +480,7 @@ def bc_install_layout_1_chef(bc, path, barclamp)
puts "\t#{knife_bag} failed. Examine #{log} for more information."
exit 1
end
- puts "\texecuted: #{path} #{knife_bag}" if DEBUG
+ puts "\texecuted: #{path} #{knife_bag}" if debug
json = Dir.entries(bag_path).find_all { |r| r.end_with?(".json") }
json.each do |bag_file|
@@ -445,11 +489,11 @@ def bc_install_layout_1_chef(bc, path, barclamp)
puts "\t#{knife_databag} failed. Examine #{log} for more information."
exit 1
end
- puts "\texecuted: #{path} #{knife_databag}" if DEBUG
+ puts "DEBUG: \texecuted: #{path} #{knife_databag}" if debug
end
end
else
- puts "\tNOTE: could not find databags #{databags}" if DEBUG
+ puts "DEBUG: \tNOTE: could not find databags #{databags}" if debug
end
#upload the roles
@@ -461,40 +505,42 @@ def bc_install_layout_1_chef(bc, path, barclamp)
puts "\t#{knife_role} failed. Examine #{log} for more information."
exit 1
end
- puts "\texecuted: #{path} #{knife_role}" if DEBUG
+ puts "DEBUG: \texecuted: #{path} #{knife_role}" if debug
end
else
- puts "\tNOTE: could not find roles #{roles}" if DEBUG
+ puts "DEBUG: \tNOTE: could not find roles #{roles}" if debug
end
puts "Barclamp #{bc} (format v1) Chef Components Uploaded."
end
-def bc_install_layout_1_cache(bc,path,barclamp)
+def bc_install_layout_1_cache(bc, path, barclamp, options={})
+ options = {:debug => false}.merge! options
+ debug = options[:debug] or ENV['DEBUG'] === "true"
return unless File.directory?(File.join(path,"cache"))
Dir.entries(File.join(path,"cache")).each do |ent|
- puts ent.inspect if DEBUG
+ puts ent.inspect if debug
case
when ent == "files"
- puts "Copying files" if DEBUG
+ puts "Copying files" if debug
system "cp -r \"#{path}/cache/#{ent}\" /tftpboot"
when ent == "gems"
# Symlink the gems into One Flat Directory.
- puts "Installing gems" if DEBUG
+ puts "Installing gems" if debug
Dir.entries("#{path}/cache/gems").each do |gem|
next unless /\.gem$/ =~ gem
unless File.directory? "/tftpboot/gemsite/gems"
system "mkdir -p /tftpboot/gemsite/gems"
end
unless File.symlink? "/tftpboot/gemsite/gems/#{gem}"
- puts "Symlinking #{path}/cache/gems/#{gem} into /tftpboot/gemsite/gems" if DEBUG
+ puts "DEBUG: Symlinking #{path}/cache/gems/#{gem} into /tftpboot/gemsite/gems" if debug
File.symlink "#{path}/cache/gems/#{gem}", "/tftpboot/gemsite/gems/#{gem}"
end
end
- puts "Done" if DEBUG
+ puts "DEBUG: Done" if debug
when File.directory?("#{path}/cache/#{ent}/pkgs")
- puts "Installing packages" if DEBUG
+ puts "Installing packages" if debug
# We have actual packages here. They map into the target like so:
# path/ent/pkgs -> /tftboot/ent/crowbar-extras/bc
unless File.directory? "/tftpboot/#{ent}/crowbar-extra/"
@@ -502,11 +548,11 @@ def bc_install_layout_1_cache(bc,path,barclamp)
end
# sigh, ubuntu-install and redhat-install.
unless File.symlink? "/tftpboot/#{ent}/crowbar-extra/#{path.split('/')[-1]}"
- puts "Symlinking #{path}/cache/#{ent}/pkgs into /tftpboot/#{ent}/crowbar-extra" if DEBUG
+ puts "DEBUG: Symlinking #{path}/cache/#{ent}/pkgs into /tftpboot/#{ent}/crowbar-extra" if debug
File.symlink "#{path}/cache/#{ent}/pkgs", "/tftpboot/#{ent}/crowbar-extra/#{path.split('/')[-1]}"
end
end
- puts "Done" if DEBUG
+ puts "DEBUG: Done" if debug
true
end
end
redhat-common/build_lib.sh (7 changes)
@@ -28,9 +28,10 @@ chroot_install() {
# Fetch (but do not install) packages into the chroot environment
chroot_fetch() {
- if [[ $1 ]]; then
- in_chroot /usr/bin/yum -y --downloadonly install "$@" || :
- fi
+ local p
+ for p in "$@"; do
+ in_chroot /usr/bin/yum -y --downloadonly install "$p" || :
+ done
in_chroot /usr/bin/yum -y update
}
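
A hedged calling sketch for the reworked chroot_fetch (package names illustrative). Fetching each package in its own yum --downloadonly run, with failures swallowed by the trailing "|| :", presumably keeps one unresolvable package from blocking the download of the others:

    # each argument now gets an independent yum --downloadonly pass inside the chroot
    chroot_fetch ruby rubygems createrepo
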
run-installer.sh (57 changes)
@@ -0,0 +1,57 @@
+#!/bin/bash
+# Copyright 2011, Dell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script expects to be able to run certain commands as root.
+# Either run it as a user who can sudo to root, or give the user
+# you are running it as the following sudo rights:
+# crowbar-tester ALL = NOPASSWD: /bin/mount, /bin/umount, /bin/ip, /usr/sbin/brctl, /home/crowbar-tester/test_framework/make-cgroups.sh
+
+# We use bash4 specific functionality (hash tables), and cgroups to
+# make sure we clean up everything when we exit a test run.
+# You will need a fairly recent Linux distro to run this test --
+# RHEL/CentOS 5 will not work without some significant rework.
+
+readonly currdir="$PWD"
+
+# Location of the Crowbar checkout we are building from.
+[[ $CROWBAR_DIR ]] || CROWBAR_DIR="${0%/*}"
+[[ $CROWBAR_DIR = /* ]] || CROWBAR_DIR="$currdir/$CROWBAR_DIR"
+[[ -f $CROWBAR_DIR/build_crowbar.sh && -d $CROWBAR_DIR/.git ]] || \
+ die "$CROWBAR_DIR is not a git checkout of Crowbar!"
+export CROWBAR_DIR
+
+. "$CROWBAR_DIR/build_lib.sh"
+. "$CROWBAR_DIR/test_lib.sh"
+
+SMOKETEST_BRIDGES=(deploy-br)
+NICS_PER_BRIDGE=1
+
+
+[[ -f $1 && $1 = *Crowbar_Installer.vmdk ]] || \
+ die "$1 is not a crowbar installer disk image!"
+[[ -f $2 && $2 = *crowbar-*.iso ]] || \
+ die "$2 is not a crowbar ISO!"
+[[ -d /sys/class/net/$3 ]] || \
+ die "$3 is not an interface to let the installer serve PXE boots!"
+PHYSICAL_INTERFACES=("$3,deploy-br")
+make_virt_net
+
+kvm -drive "file=$1,if=ide,media=disk,snapshot=on" \
+ -cdrom "$2" \
+ -net "nic,macaddr=52:54:00:00:00:8f,model=e1000" \
+ -net "tap,ifname=admin-0-br,script=no,downscript=no" \
+ -m 512 -snapshot
+kill_virt_net
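
A hedged usage sketch for the new script; the image names and NIC below are illustrative, and only need to satisfy the *Crowbar_Installer.vmdk, *crowbar-*.iso, and /sys/class/net checks above:

    # boot the installer image against a Crowbar ISO, letting it serve PXE boots on eth2
    ./run-installer.sh ~/images/Crowbar_Installer.vmdk ~/isos/crowbar-1.3-dev.iso eth2
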
test_framework/network-team.json (18 changes)
@@ -185,26 +185,26 @@
},
"bmc": {
"conduit": "bmc",
- "vlan": 100,
- "use_vlan": false,
+ "vlan": 600,
+ "use_vlan": true,
"add_bridge": false,
- "subnet": "192.168.124.0",
+ "subnet": "192.168.128.0",
"netmask": "255.255.255.0",
- "broadcast": "192.168.124.255",
+ "broadcast": "192.168.128.255",
"ranges": {
- "host": { "start": "192.168.124.162", "end": "192.168.124.240" }
+ "host": { "start": "192.168.128.10", "end": "192.168.128.254" }
}
},
"bmc_vlan": {
"conduit": "intf2",
- "vlan": 100,
+ "vlan": 600,
"use_vlan": true,
"add_bridge": false,
- "subnet": "192.168.124.0",
+ "subnet": "192.168.128.0",
"netmask": "255.255.255.0",
- "broadcast": "192.168.124.255",
+ "broadcast": "192.168.128.255",
"ranges": {
- "host": { "start": "192.168.124.161", "end": "192.168.124.161" }
+ "host": { "start": "192.168.128.2", "end": "192.168.128.9" }
}
},
"admin": {
test_lib.sh (35 changes)
@@ -22,7 +22,7 @@ declare -a SMOKETEST_VLANS
SMOKETEST_VLANS[200]="192.168.125.1/24"
SMOKETEST_VLANS[300]="192.168.126.1/24"
SMOKETEST_VLANS[500]="192.168.127.1/24"
-
+SMOKETEST_VLANS[600]="192.168.128.1/24"
# This lock is held whenever we are running tests. It exists to
# prevent multiple instances of the smoketest from running at once.
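
The new SMOKETEST_VLANS[600] entry added in this hunk lines up with the BMC network's move to VLAN 600 on 192.168.128.0/24 in test_framework/network-team.json above. Purely to illustrate the addressing (interface names are hypothetical, and this is not how test_lib.sh itself plumbs the VLAN), the host side of that network would look roughly like:

    ip link add link deploy-br name deploy-br.600 type vlan id 600
    ip addr add 192.168.128.1/24 dev deploy-br.600
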
@@ -69,6 +69,7 @@ NICS_PER_BRIDGE=2
# it can create and destroy vlans as needed.
# Each entry in this array is if the form ifname,bridgename
# PHYSICAL_INTERFACES=(eth1,crowbar-pub)
+PHYSICAL_INTERFACES=()
# An array of MAC addresses of the primary interfaces of the physical machines.
# We need to have this information beforehand so that we can send
@@ -526,8 +527,19 @@ run_kvm() {
cpu_count=4
mem_size=4G
fi
- if kvm -device \? 2>&1 |grep -q ahci && [[ $(kvm -version) =~ kvm-1 ]]; then
- local kvm_use_ahci=true
+ # Hack to pick the fastest disk caching mode.
+ # We use unsafe caching if we can on the vms because we will just
+ # rebuild the filesystems from scratch if anything goes wrong.
+ if ! [[ $drive_cache ]]; then
+ if kvm --help |grep -q 'cache.*unsafe'; then
+ drive_cache=unsafe
+ else
+ drive_cache=writeback
+ fi
+ if kvm -device \? 2>&1 |grep -q ahci && \
+ [[ $(kvm -version) =~ kvm-1 ]]; then
+ kvm_use_ahci=true
+ fi
fi
local vm_gen="$vmname.${kvm_generations[$vmname]}"
# create a new log directory for us. vm_logdir needs to be global
@@ -1080,15 +1092,6 @@ run_test() {
exit 1
done
- # Hack to pick the fastest disk caching mode.
- # We use unsafe caching if we can on the vms because we will just
- # rebuild the filesystems from scratch if anything goes wrong.
- if kvm --help |grep -q 'cache.*unsafe'; then
- drive_cache=unsafe
- else
- drive_cache=writeback
- fi
-
mangle_ssh_config
CGROUP_DIR=$(sudo -n "$(which make_cgroups.sh)" $$ crowbar-test) || \
@@ -1111,6 +1114,14 @@ run_test() {
manual-deploy) local manual_deploy=true;;
use-iso) shift; SMOKETEST_ISO="$1";;
single|dual|team) local network_mode="$1";;
+ bind-nic) shift;
+ [[ -d /sys/class/net/$1 ]] || \
+ die "$1 is not a network interface!"
+ is_in "$2" "${SMOKETEST_BRIDGES[*]}" || \
+ die "$2 is not a bridge of ours!"
+ PHYSICAL_INTERFACES+=("$1,$2")
+ shift;;
+ use-screen) unset DISPLAY;;
scratch);;
*)
if [[ -d $CROWBAR_DIR/barclamps/$1 ]]; then
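
A hedged sketch of the new run_test arguments (NIC name illustrative; the bridge name follows the PHYSICAL_INTERFACES example commented earlier in this file, and run_test is called directly here the way a wrapper sourcing test_lib.sh would call it):

    # bind-nic appends eth3,crowbar-pub to PHYSICAL_INTERFACES; use-screen just unsets DISPLAY
    run_test bind-nic eth3 crowbar-pub use-screen
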
ubuntu-common/build_lib.sh (135 changes)
@@ -1,5 +1,5 @@
#!/bin/bash
-# This file is sourced by build_crowbar.sh when you want to build Crowbar
+# This file is sourced by build_crowbar.sh when you want to build Crowbar
# using Ubuntu as the base OS. It includes build routines common to all
# Ubuntu distributions (so far).
@@ -21,8 +21,8 @@ fetch_os_iso() {
# Try and download our ISO if we don't already have it
echo "$(date '+%F %T %z'): Downloading and caching $ISO"
curl -o "$ISO_LIBRARY/$ISO" \
- "$ISO_MIRROR/ubuntu-iso/CDs/$OS_VERSION/$ISO" || \
- die 1 "Missing our source image"
+ "$ISO_MIRROR/ubuntu-iso/CDs/$OS_VERSION/$ISO" || \
+ die 1 "Missing our source image"
}
# Have the chroot update its package databases.
@@ -30,23 +30,26 @@ chroot_update() { in_chroot /usr/bin/apt-get -y --force-yes \
--allow-unauthenticated update; }
# Install some packages in the chroot environment.
-chroot_install() {
+chroot_install() {
if [[ $1 ]]; then
- in_chroot /usr/bin/apt-get -y --force-yes \
- --allow-unauthenticated install "$@"
+ in_chroot /usr/bin/apt-get -y --force-yes \
+ --allow-unauthenticated install "$@"
fi
in_chroot /usr/bin/apt-get -y --force-yes \
- --allow-unauthenticated --download-only upgrade
+ --allow-unauthenticated --download-only upgrade
}
# Fetch (but do not install) packages into the chroot environment
chroot_fetch() {
if [[ $1 ]]; then
- in_chroot /usr/bin/apt-get -y --force-yes \
- --allow-unauthenticated --download-only install "$@"
+ local p
+ for p in "$@"; do
+ in_chroot /usr/bin/apt-get -y --force-yes \
+ --allow-unauthenticated --download-only install "$p"
+ done
fi
in_chroot /usr/bin/apt-get -y --force-yes \
- --allow-unauthenticated --download-only upgrade
+ --allow-unauthenticated --download-only upgrade
}
# Add repositories to the local chroot environment.
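
As with the Red Hat variant earlier, a hedged calling sketch for the Ubuntu chroot_fetch above (package names illustrative); each package now gets its own apt-get --download-only pass:

    chroot_fetch openssh-server tcpdump
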
@@ -54,41 +57,41 @@ add_repos() {
local repo ppas=()
local f=$(mktemp /tmp/ubuntu_repos.XXXXXX)
for repo in "$@"; do
- case $repo in
- ppa*) ppas+=("${ppa#* }");;
- deb*) echo "$repo" >> "$f";;
- *) die "Unknown Debian repository type $repo";;
- esac
+ case $repo in
+ ppa*) ppas+=("${ppa#* }");;
+ deb*) echo "$repo" >> "$f";;
+ *) die "Unknown Debian repository type $repo";;
+ esac
done
in_chroot mkdir -p /etc/apt/sources.list.d
sudo cp "$f" "$CHROOT/etc/apt/sources.list.d/${f##*.}.list"
rm "$f"
[[ $ppas ]] || return 0
- chroot_install python-software-properties
+ chroot_install python-software-properties
for repo in "${ppas[@]}"; do
- in_chroot apt-add-repository "ppa:${repo}"
+ in_chroot apt-add-repository "ppa:${repo}"
done
}
# Test to see we were passed a valid package file name.
is_pkg() { [[ $1 = *.deb ]]; }
-# Look up name and version information for a package using
+# Look up name and version information for a package using
# dpkg. Make sure and memoize things.
dpkg_info() {
# $1 = package to examine
local name arch ver f1 f2
[[ -f $1 && $1 = *.deb ]] || die "$1 is not a debian package!"
if [[ ! ${SEEN_DEBS["${1##*/}"]} ]]; then
- while read f1 f2; do
- case $f1 in
- Package:) name="$f2";;
- Version:) ver="$f2";;
- Architecture:) arch="$f2";;
- esac
- [[ $name && $ver && $arch ]] && break || :
- done < <(dpkg -I "$1")
- SEEN_DEBS["${1##*/}"]="$name-$arch $ver"
+ while read f1 f2; do
+ case $f1 in
+ Package:) name="$f2";;
+ Version:) ver="$f2";;
+ Architecture:) arch="$f2";;
+ esac
+ [[ $name && $ver && $arch ]] && break || :
+ done < <(dpkg -I "$1")
+ SEEN_DEBS["${1##*/}"]="$name-$arch $ver"
fi
echo "${SEEN_DEBS["${1##*/}"]}"
}
@@ -103,7 +106,7 @@ __barclamp_pkg_metadata_needs_update() (
cd "$CACHE_DIR/barclamps/$1/$OS_TOKEN/pkgs"
[[ -f Packages.gz ]] || return 0
while read fname; do
- [[ $fname -nt . ]] && return 0
+ [[ $fname -nt . ]] && return 0
done < <(find . -name '*.deb' -type f)
return 1
)
@@ -112,7 +115,7 @@ __make_barclamp_pkg_metadata() {
in_chroot /bin/bash -c 'cd /mnt; dpkg-scanpackages . 2>/dev/null |gzip -9 >Packages.gz'
sudo chown -R "$(whoami)" "$CACHE_DIR/barclamps/$1/$OS_TOKEN/pkgs"
if [[ $CURRENT_CACHE_BRANCH ]]; then
- in_cache git add "barclamps/$1/$OS_TOKEN/pkgs/Packages.gz"
+ in_cache git add "barclamps/$1/$OS_TOKEN/pkgs/Packages.gz"
fi
}
@@ -122,11 +125,11 @@ add_offline_repos() {
in_chroot mkdir -p /packages/barclamps
local bc
for bc in "${BARCLAMPS[@]}"; do
- [[ -f $CACHE_DIR/barclamps/$bc/$OS_TOKEN/pkgs/Packages.gz ]] || continue
- sudo mkdir -p "$CHROOT/packages/barclamps/$bc"
- sudo mount --bind "$CACHE_DIR/barclamps/$bc/$OS_TOKEN/pkgs" \
- "$CHROOT/packages/barclamps/$bc"
- add_repos "deb file:///packages/barclamps/$bc /"
+ [[ -f $CACHE_DIR/barclamps/$bc/$OS_TOKEN/pkgs/Packages.gz ]] || continue
+ sudo mkdir -p "$CHROOT/packages/barclamps/$bc"
+ sudo mount --bind "$CACHE_DIR/barclamps/$bc/$OS_TOKEN/pkgs" \
+ "$CHROOT/packages/barclamps/$bc"
+ add_repos "deb file:///packages/barclamps/$bc /"
done
sudo mount --bind "$IMAGE_DIR" "$CHROOT/packages/base"
add_repos "deb file:///packages/base $OS_CODENAME main restricted"
@@ -138,8 +141,8 @@ __make_chroot() {
# Ubuntu to ensure that we don't interfere with the host's package cache.
local d repo bc f
sudo debootstrap "$OS_CODENAME" "$CHROOT" \
- "file://$IMAGE_DIR" || \
- die 1 "Could not bootstrap our scratch target!"
+ "file://$IMAGE_DIR" || \
+ die 1 "Could not bootstrap our scratch target!"
# mount some important directories for the chroot
for d in /proc /sys /dev /dev/pts /dev/shm; do
@@ -151,14 +154,14 @@ __make_chroot() {
sudo cp /etc/resolv.conf "$CHROOT/etc/resolv.conf"
# make sure the chroot honors proxies
if [[ $http_proxy || $https_proxy ]]; then
- f=$(mktemp /tmp/apt.http.conf.XXXXXX)
- [[ $http_proxy ]] && echo \
- "Acquire::http::Proxy \"$http_proxy\";" >> "$f"
- [[ $https_proxy ]] && echo \
- "Acquire::https::Proxy \"$https_proxy\";" >> "$f"
- echo "Acquire::http::Proxy::127.0.0.1 \"DIRECT\";" >> "$f"
- in_chroot mkdir -p "/etc/apt/apt.conf.d/"
- sudo cp "$f" "$CHROOT/etc/apt/apt.conf.d/00http_proxy"
+ f=$(mktemp /tmp/apt.http.conf.XXXXXX)
+ [[ $http_proxy ]] && echo \
+ "Acquire::http::Proxy \"$http_proxy\";" >> "$f"
+ [[ $https_proxy ]] && echo \
+ "Acquire::https::Proxy \"$https_proxy\";" >> "$f"
+ echo "Acquire::http::Proxy::127.0.0.1 \"DIRECT\";" >> "$f"
+ in_chroot mkdir -p "/etc/apt/apt.conf.d/"
+ sudo cp "$f" "$CHROOT/etc/apt/apt.conf.d/00http_proxy"
fi
}
@@ -169,7 +172,7 @@ pkg_cmp() {
local deb1="$(dpkg_info "$1")"
local deb2="$(dpkg_info "$2")"
[[ ${deb1%% *} = ${deb2%% *} ]] || \
- die "$1 and $2 do not reference the same package!"
+ die "$1 and $2 do not reference the same package!"
vercmp "${deb1#* }" "${deb2#* }"
}
@@ -188,7 +191,7 @@ final_build_fixups() {
ar x "$udeb"
tar xzf data.tar.gz
rm -rf debian-binary *.tar.gz
- done
+ done
# bnx2x nic drivers require firmware images from the kernel image .deb
ar x "$IMAGE_DIR/pool/main/l/linux/"linux-image-*-generic_*.deb
tar xjf data.tar.bz2 --wildcards './lib/firmware/*/bnx2x/*'
@@ -197,25 +200,25 @@ final_build_fixups() {
debug "Adding USB connected DVD support"
mkdir -p var/lib/dpkg/info
cp "$CROWBAR_DIR/initrd/cdrom-detect.postinst" var/lib/dpkg/info
- debug "Enabling bootif support for debian-installer"
- mkdir -p lib/debian-installer-startup.d/
- [[ -f $CROWBAR_DIR/$OS_TO_STAGE-extra/patches/bootif ]] && {
- cp "$CROWBAR_DIR/$OS_TO_STAGE-extra/patches/bootif" \
- lib/debian-installer-startup.d/S32set-bootif
- chmod 755 "lib/debian-installer-startup.d/S32set-bootif"
- }
- for initrd in "install/initrd.gz" \
- "install/netboot/ubuntu-installer/amd64/initrd.gz"; do
- [[ -f $IMAGE_DIR/$initrd ]] || continue
- mkdir -p "$BUILD_DIR/${initrd%/*}"
- gunzip -c "$IMAGE_DIR/$initrd" >"$BUILD_DIR/initrd.tmp"
- find . | \
- cpio --format newc --owner root:root \
- -oAF "$BUILD_DIR/initrd.tmp"
- cat "$BUILD_DIR/initrd.tmp" | \
- gzip -9 > "$BUILD_DIR/$initrd"
- done
- rm "$BUILD_DIR/initrd.tmp"
+ debug "Enabling bootif support for debian-installer"
+ mkdir -p lib/debian-installer-startup.d/
+ [[ -f $CROWBAR_DIR/$OS_TO_STAGE-extra/patches/bootif ]] && {
+ cp "$CROWBAR_DIR/$OS_TO_STAGE-extra/patches/bootif" \
+ lib/debian-installer-startup.d/S32set-bootif
+ chmod 755 "lib/debian-installer-startup.d/S32set-bootif"
+ }
+ for initrd in "install/initrd.gz" \
+ "install/netboot/ubuntu-installer/amd64/initrd.gz"; do
+ [[ -f $IMAGE_DIR/$initrd ]] || continue
+ mkdir -p "$BUILD_DIR/${initrd%/*}"
+ gunzip -c "$IMAGE_DIR/$initrd" >"$BUILD_DIR/initrd.tmp"
+ find . | \
+ cpio --format newc --owner root:root \
+ -oAF "$BUILD_DIR/initrd.tmp"
+ cat "$BUILD_DIR/initrd.tmp" | \
+ gzip -9 > "$BUILD_DIR/$initrd"
+ done
+ rm "$BUILD_DIR/initrd.tmp"
)
# rm -rf "$BUILD_DIR/initrd"
}
@@ -223,5 +226,5 @@ final_build_fixups() {
# Check to make sure all our prerequisites are met.
for cmd in debootstrap ar; do
which "$cmd" &>/dev/null || \
- die "Please install $cmd before trying to build Crowbar."
+ die "Please install $cmd before trying to build Crowbar."
done
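
The prerequisite check above needs debootstrap and ar on the build host; on a Debian or Ubuntu host those would normally come from the following packages (stated as an assumption about the host, not something this diff installs):

    sudo apt-get install debootstrap binutils    # binutils provides ar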
