forked from theforeman/smart_proxy_ansible
-
Notifications
You must be signed in to change notification settings - Fork 0
/
ansible_runner.rb
287 lines (248 loc) · 10.4 KB
/
ansible_runner.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
require 'fileutils'
require 'json'
require 'logger'
require 'shellwords'
require 'tmpdir'
require 'yaml'
require 'smart_proxy_dynflow/runner/process_manager_command'
require 'smart_proxy_dynflow/runner/base'
require 'smart_proxy_dynflow/runner/parent'
module Proxy::Ansible
module Runner
# Drives `ansible-runner` to execute a playbook against a set of hosts inside
# a private working directory, polling the generated job-event artifacts to
# publish per-host output and exit statuses back through dynflow.
class AnsibleRunner < ::Proxy::Dynflow::Runner::Parent
  include ::Proxy::Dynflow::Runner::ProcessManagerCommand
  attr_reader :execution_timeout_interval

  # To make this overridable in development
  ENVIRONMENT_WRAPPER = ENV['SMART_PROXY_ANSIBLE_ENVIRONMENT_WRAPPER'] || '/usr/libexec/foreman-proxy/ansible-runner-environment'

  # @param input [Hash] per-host action input; every value carries the shared
  #   playbook/settings/secrets under [:input][:action_input] plus a
  #   host-specific inventory fragment merged by #rebuild_inventory.
  # @param suspended_action [Object] dynflow suspended action to report to
  # @param id [String, nil] externally assigned runner id
  def initialize(input, suspended_action:, id: nil)
    super input, :suspended_action => suspended_action, :id => id
    @inventory = rebuild_secrets(rebuild_inventory(input), input)
    # All hosts share the same playbook/settings, so read them from the first entry
    action_input = input.values.first[:input][:action_input]
    @playbook = action_input[:script]
    @root = working_dir
    @verbosity_level = action_input[:verbosity_level]
    @rex_command = action_input[:remote_execution_command]
    @check_mode = action_input[:check_mode]
    @tags = action_input[:tags]
    @tags_flag = action_input[:tags_flag]
    @passphrase = action_input['secrets']['key_passphrase']
    @execution_timeout_interval = action_input[:execution_timeout_interval]
    @cleanup_working_dirs = action_input.fetch(:cleanup_working_dirs, true)
  end

  # Lays out the ansible-runner private data directory and spawns the process.
  def start
    prepare_directory_structure
    write_inventory
    write_playbook
    write_ssh_key if !@passphrase.nil? && !@passphrase.empty?
    start_ansible_runner
  end

  # Forces a poll of the artifact directory and pushes any pending updates.
  def run_refresh_output
    logger.debug('refreshing runner on demand')
    process_artifacts
    generate_updates
  end

  def timeout
    logger.debug('job timed out')
    super
  end

  def timeout_interval
    execution_timeout_interval
  end

  # Terminates the ansible-runner process and marks every host as failed with
  # exit status 2; invoked when the execution timeout passes.
  def kill
    ::Process.kill('SIGTERM', @process_manager.pid)
    publish_exit_status(2)
    @inventory['all']['hosts'].each { |hostname| @exit_statuses[hostname] = 2 }
    broadcast_data('Timeout for execution passed, stopping the job', 'stderr')
    close
  end

  # Removes the working directory, but only when we created a temporary one
  # ourselves and cleanup was not disabled via the action input.
  def close
    super
    FileUtils.remove_entry(@root) if @tmp_working_dir && Dir.exist?(@root) && @cleanup_working_dirs
  end

  # Drains any remaining job events before publishing the final status.
  def publish_exit_status(status)
    process_artifacts
    super
  end

  def initialize_command(*command)
    super
    # ansible-runner does not read stdin; close it so the child never blocks on it
    @process_manager.stdin.close unless @process_manager.done?
  end

  private

  # Scans the artifacts directory for job-event JSON files numbered >= @counter
  # and processes them in order. The run's uuid is discovered lazily from the
  # first subdirectory that ansible-runner creates under artifacts/.
  def process_artifacts
    @counter ||= 1
    @uuid ||= if (f = Dir["#{@root}/artifacts/*"].first)
                File.basename(f)
              end
    return unless @uuid
    job_event_dir = File.join(@root, 'artifacts', @uuid, 'job_events')
    loop do
      # Event files start with a sequence number; files still being written
      # carry a 'partial' marker and are skipped (their num stays nil).
      files = Dir["#{job_event_dir}/*.json"].map do |file|
        num = File.basename(file)[/\A\d+/].to_i unless file.include?('partial')
        [file, num]
      end
      files_with_nums = files.select { |(_, num)| num && num >= @counter }.sort_by(&:last)
      break if files_with_nums.empty?
      # Fixed: a stray '}' used to leak into this log message
      logger.debug("[foreman_ansible] - processing event files: #{files_with_nums.map(&:first).inspect}")
      files_with_nums.map(&:first).each { |event_file| handle_event_file(event_file) }
      @counter = files_with_nums.last.last + 1
    end
  end

  # Parses a single job-event file and dispatches it either to a specific host
  # or as broadcast output. Returns true on success, nil on a parse error.
  def handle_event_file(event_file)
    logger.debug("[foreman_ansible] - parsing event file #{event_file}")
    begin
      event = JSON.parse(File.read(event_file))
      if (hostname = hostname_for_event(event))
        handle_host_event(hostname, event)
      else
        handle_broadcast_data(event)
      end
      true
    rescue JSON::ParserError => e
      logger.error("[foreman_ansible] - Error parsing runner event at #{event_file}: #{e.class}: #{e.message}")
      logger.debug(e.backtrace.join("\n"))
    end
  end

  # Extracts the host an event belongs to, or nil when the event should be
  # broadcast (no host attached, or the host is unknown to this job).
  def hostname_for_event(event)
    hostname = event.dig('event_data', 'host') || event.dig('event_data', 'remote_addr')
    return nil if hostname.nil? || hostname.empty?
    # NOTE(review): @targets is not assigned anywhere in this file — presumably
    # maintained by the parent runner class; confirm against smart_proxy_dynflow
    unless @targets.key?(hostname)
      logger.warn("handle_host_event: unknown host #{hostname} for event '#{event['event']}', broadcasting")
      return nil
    end
    hostname
  end

  # Publishes a host's stdout and translates runner events into exit codes:
  # 0 = ok, 1 = unreachable, 2 = failed (unless the failure is ignored).
  def handle_host_event(hostname, event)
    log_event("for host: #{hostname.inspect}", event)
    publish_data_for(hostname, event['stdout'] + "\n", 'stdout') if event['stdout']
    case event['event']
    when 'runner_on_ok'
      publish_exit_status_for(hostname, 0) if @exit_statuses[hostname].nil?
    when 'runner_on_unreachable'
      publish_exit_status_for(hostname, 1)
    when 'runner_on_failed'
      publish_exit_status_for(hostname, 2) if event.dig('event_data', 'ignore_errors').nil?
    end
  end

  # Handles events without a host. For the final 'playbook_on_stats' event the
  # recap table is split per host, and a host that ultimately has no failures
  # or unreachable tasks is reset to exit status 0.
  def handle_broadcast_data(event)
    log_event("broadcast", event)
    if event['event'] == 'playbook_on_stats'
      failures = event.dig('event_data', 'failures') || {}
      unreachable = event.dig('event_data', 'dark') || {}
      header, *rows = event['stdout'].strip.lines.map(&:chomp)
      @outputs.keys.select { |key| key.is_a? String }.each do |host|
        # Escape the hostname: dots and other regexp metacharacters must match
        # literally, otherwise a host could match another host's recap row
        line = rows.find { |row| row =~ /#{Regexp.escape(host)}/ }
        publish_data_for(host, [header, line].join("\n"), 'stdout')
        # If the task has been rescued, it won't consider a failure
        if @exit_statuses[host].to_i != 0 && failures[host].to_i <= 0 && unreachable[host].to_i <= 0
          publish_exit_status_for(host, 0)
        end
      end
    else
      broadcast_data(event['stdout'] + "\n", 'stdout')
    end
  end

  # Writes the merged inventory as a JSON data file plus a tiny executable
  # script that cats it, which ansible-runner treats as a dynamic inventory.
  def write_inventory
    path = File.join(@root, 'inventory', 'hosts')
    data_path = File.join(@root, 'data')
    inventory_script = <<~INVENTORY_SCRIPT
      #!/bin/sh
      cat #{::Shellwords.escape data_path}
    INVENTORY_SCRIPT
    File.write(path, inventory_script)
    File.write(data_path, JSON.dump(@inventory))
    File.chmod(0o0755, path)
  end

  def write_playbook
    File.write(File.join(@root, 'project', 'playbook.yml'), @playbook)
  end

  # Symlinks the proxy's SSH identity key into the runner env and stores the
  # key passphrase in ansible-runner's passwords file (mode 0600).
  def write_ssh_key
    key_path = File.join(@root, 'env', 'ssh_key')
    File.symlink(File.expand_path(Proxy::RemoteExecution::Ssh::Plugin.settings[:ssh_identity_key_file]), key_path)
    passwords_path = File.join(@root, 'env', 'passwords')
    # here we create a secrets file for ansible-runner, which uses the key as regexp
    # to match line asking for password, given the limitation to match only first 100 chars
    # and the fact the line contains dynamically created temp directory, the regexp
    # mentions only things that are always there, such as artifacts directory and the key name
    secrets = YAML.dump({ "for.*/artifacts/.*/ssh_key_data:" => @passphrase })
    File.write(passwords_path, secrets, perm: 0o600)
  end

  # Builds the ansible-runner invocation and hands it to the process manager
  # through the environment wrapper script.
  def start_ansible_runner
    env = {}
    # Disable the Foreman callback when running a plain REX command
    env['FOREMAN_CALLBACK_DISABLE'] = '1' if @rex_command
    env['SMART_PROXY_ANSIBLE_ENVIRONMENT_FILE'] = Proxy::Ansible::Plugin.settings[:ansible_environment_file]
    command = ['ansible-runner', 'run', @root, '-p', 'playbook.yml']
    command << '--cmdline' << cmdline unless cmdline.nil?
    command << verbosity if verbose?
    initialize_command(env, ENVIRONMENT_WRAPPER, *command)
    logger.debug("[foreman_ansible] - Running command '#{command.join(' ')}'")
  end

  # Extra arguments forwarded to ansible-playbook, or nil when none apply.
  def cmdline
    cmd_args = [tags_cmd, check_cmd].reject(&:empty?)
    return nil unless cmd_args.any?
    cmd_args.join(' ')
  end

  def tags_cmd
    flag = @tags_flag == 'include' ? '--tags' : '--skip-tags'
    @tags.empty? ? '' : "#{flag} '#{Array(@tags).join(',')}'"
  end

  def check_cmd
    check_mode? ? '"--check"' : ''
  end

  def verbosity
    '-' + 'v' * @verbosity_level.to_i
  end

  def verbose?
    @verbosity_level.to_i.positive?
  end

  # Check mode is only honoured for playbook jobs, never for REX commands.
  def check_mode?
    @check_mode == true && @rex_command == false
  end

  def prepare_directory_structure
    inner = %w[inventory project env].map { |part| File.join(@root, part) }
    ([@root] + inner).each do |path|
      FileUtils.mkdir_p path
    end
  end

  def log_event(description, event)
    # TODO: replace this ugly code with block variant once https://github.com/Dynflow/dynflow/pull/323
    # arrives in production
    logger.debug("[foreman_ansible] - handling event #{description}: #{JSON.pretty_generate(event)}") if logger.level <= ::Logger::DEBUG
  end

  # Each per-host task has inventory only for itself, we must
  # collect all the partial inventories into one large inventory
  # containing all the hosts.
  def rebuild_inventory(input)
    action_inputs = input.values.map { |hash| hash[:input][:action_input] }
    hostnames = action_inputs.map { |hash| hash[:name] }
    inventories = action_inputs.map { |hash| hash[:ansible_inventory] }
    host_vars = inventories.map { |i| i['_meta']['hostvars'] }.reduce({}) do |acc, hosts|
      hosts.reduce(acc) do |inner_acc, (hostname, vars)|
        # Fall back to the proxy-wide identity key when the host carries none
        vars[:ansible_ssh_private_key_file] ||= Proxy::RemoteExecution::Ssh::Plugin.settings[:ssh_identity_key_file]
        inner_acc.merge(hostname => vars)
      end
    end
    { '_meta' => { 'hostvars' => host_vars },
      'all' => { 'hosts' => hostnames,
                 'vars' => inventories.first['all']['vars'] } }
  end

  # Returns the directory the runner works in; creates a temporary one
  # (flagged via @tmp_working_dir for cleanup in #close) unless the plugin
  # settings pin a working_dir.
  def working_dir
    return @root if @root
    dir = Proxy::Ansible::Plugin.settings[:working_dir]
    @tmp_working_dir = true
    if dir.nil?
      Dir.mktmpdir
    else
      Dir.mktmpdir(nil, File.expand_path(dir))
    end
  end

  # Re-injects the per-host connection secrets into the merged inventory's
  # hostvars, preferring job-wide overrides when present.
  def rebuild_secrets(inventory, input)
    input.each do |host, host_input|
      secrets = host_input['input']['action_input']['secrets']
      per_host = secrets['per-host'][host]
      new_secrets = {
        'ansible_password' => inventory['ssh_password'] || per_host['ansible_password'],
        'ansible_become_password' => inventory['effective_user_password'] || per_host['ansible_become_password']
      }
      inventory['_meta']['hostvars'][host].update(new_secrets)
    end
    inventory
  end
end
end
end