
(PDB-221) Add facts to import/export

This commit imports/exports facts in the same way we currently import/export
catalogs and reports. Anonymization doesn't currently work for facts; that
will be added separately.
1 parent fb4bdbf commit 4e7ad252cac075922fda12401260380f5094ffcc @senior committed Feb 19, 2014
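
For context, the exporter now writes one JSON file per node under puppetdb-bak/facts/, alongside the existing catalogs/ and reports/ entries. Below is a rough, self-contained sketch of building one such facts entry; cheshire's generate-string stands in here for the project's json/generate-pretty-string wrapper, and the node name and fact values are invented for illustration:

(require '[cheshire.core :as json])

;; Build the same kind of tar-item map that facts->tar (in the diff below) produces.
(let [node  "web01.example.com"                ; hypothetical node name
      facts {"foo" "bar" "osfamily" "Debian"}] ; hypothetical fact values
  {:msg         (format "Writing facts for node '%s'" node)
   :file-suffix ["facts" (format "%s.json" node)]
   :contents    (json/generate-string {"name" node "values" facts} {:pretty true})})

The resulting :contents string is what lands in puppetdb-bak/facts/web01.example.com.json inside the archive, and it has the same {"name": ..., "values": {...}} shape that the acceptance test checks via `puppet facts find --terminus puppetdb` after import.
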
@@ -595,6 +595,7 @@ def compare_export_data(export_file1, export_file2, opts={})
:catalogs => true,
:metadata => true,
:reports => true,
+ :facts => true
}.merge(opts)
# NOTE: I'm putting this tmpdir inside of cwd because I expect for that to
@@ -626,13 +627,19 @@ def compare_export_data(export_file1, export_file2, opts={})
compare_metadata(f, expected_path) if opts[:metadata]
when :report
compare_report(f, expected_path) if opts[:reports]
+ when :facts
+ compare_facts(f, expected_path) if opts[:facts]
when :unknown
fail("Unrecognized file found in archive: '#{relative_path}'")
end
end
export2_files = Set.new(
Dir.glob("#{export_dir2}/**/*").map { |f| f.sub(/^#{Regexp.escape(export_dir2)}\//, "") })
+
+ export1_files.delete_if{ |path| !opts[:facts] && /^puppetdb-bak\/facts.*/.match(path)}
+ export2_files.delete_if{ |path| !opts[:facts] && /^puppetdb-bak\/facts.*/.match(path)}
+
diff = export2_files - export1_files
assert(diff.empty?, "Export file '#{export_file2}' contains extra file entries: '#{diff.to_a.join("', '")}'")
@@ -648,11 +655,25 @@ def get_export_entry_type(path)
:catalog
when /^puppetdb-bak\/reports\/.*\.json$/
:report
+ when /^puppetdb-bak\/facts\/.*\.json$/
+ :facts
else
:unknown
end
end
+ def compare_facts(facts1_path, facts2_path)
+ f1 = JSON.parse(File.read(facts1_path))
+ f2 = JSON.parse(File.read(facts2_path))
+
+ diff = hash_diff(f1, f2)
+
+ if (diff)
+ diff = JSON.pretty_generate(diff)
+ end
+
+ assert(diff.nil?, "Facts '#{facts1_path}' and '#{facts2_path}' don't match! Diff:\n#{diff}")
+ end
def compare_catalog(cat1_path, cat2_path)
cat1 = munge_catalog_for_comparison(cat1_path)
@@ -1011,7 +1032,8 @@ def create_remote_site_pp(host, manifest)
remote_path
end
- def run_agents_with_new_site_pp(host, manifest)
+ def run_agents_with_new_site_pp(host, manifest, env_vars = {})
+
manifest_path = create_remote_site_pp(host, manifest)
with_puppet_running_on host, {
'master' => {
@@ -1020,7 +1042,9 @@ def run_agents_with_new_site_pp(host, manifest)
'autosign' => 'true',
'manifest' => manifest_path
}} do
- run_agent_on agents, "--test --server #{host}", :acceptable_exit_codes => [0,2]
+ # only some of the opts work with puppet_agent; :acceptable_exit_codes does not, so pass it to `on` directly
+ agents.each{ |agent| on agent, puppet_agent("--test --server #{host}", { 'ENV' => env_vars }), :acceptable_exit_codes => [0,2] }
+
end
end
@@ -47,11 +47,11 @@
if type == "none"
step "verify original export data matches new export data" do
- compare_export_data(export_file1, export_file2)
+ compare_export_data(export_file1, export_file2, :facts => false)
end
else
step "verify anonymized data matches new export data" do
- compare_export_data(anon_file, export_file2)
+ compare_export_data(anon_file, export_file2, :facts => false)
end
end
end
@@ -15,6 +15,10 @@ def restart_to_gc(database)
test_name "validate that nodes are deactivated and deleted based on ttl settings" do
+ step "clear puppetdb database so that we can import into a clean db" do
+ clear_and_restart_puppetdb(database)
+ end
+
with_puppet_running_on master, {
'master' => {
'autosign' => 'true'
@@ -13,9 +13,16 @@
}
MANIFEST
- run_agents_with_new_site_pp(master, manifest)
+ run_agents_with_new_site_pp(master, manifest, {"facter_foo" => "bar"})
end
+ step "verify foo fact present" do
+ result = on master, "puppet facts find #{master.node_name} --terminus puppetdb"
+ facts = JSON.parse(result.stdout.strip)
+ assert_equal('bar', facts['values']['foo'], "Failed to retrieve facts for '#{master.node_name}' via inventory service!")
+ end
+
+
export_file1 = "./puppetdb-export1.tar.gz"
export_file2 = "./puppetdb-export2.tar.gz"
@@ -33,6 +40,12 @@
sleep_until_queue_empty(database)
end
+ step "verify facts were exported/imported correctly" do
+ result = on master, "puppet facts find #{master.node_name} --terminus puppetdb"
+ facts = JSON.parse(result.stdout.strip)
+ assert_equal('bar', facts['values']['foo'], "Failed to retrieve facts for '#{master.node_name}' via inventory service!")
+ end
+
step "export data from puppetdb again" do
on database, "#{sbin_loc}/puppetdb export --outfile #{export_file2}"
scp_from(database, export_file2, ".")
@@ -18,13 +18,25 @@
[clojure.java.io :as io]
[clj-http.client :as client]
[com.puppetlabs.archive :as archive]
- [slingshot.slingshot :refer [try+]]))
+ [slingshot.slingshot :refer [try+]]
+ [com.puppetlabs.puppetdb.schema :as pls]
+ [schema.core :as s]))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Internal Schemas
+
+(def tar-item {:msg String
+ :file-suffix [String]
+ :contents String})
(def cli-description "Export all PuppetDB catalog data to a backup file")
(def export-metadata-file-name "export-metadata.json")
(def export-root-dir "puppetdb-bak")
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Catalog Exporting
+
(defn catalog-for-node
"Given a node name, retrieve the catalog for the node."
[host port node]
@@ -39,6 +51,46 @@
{ :accept :json})]
(when (= status 200) body)))
+(pls/defn-validated catalog->tar :- tar-item
+ "Create a tar-item map for the `catalog`"
+ [node :- String
+ catalog-json-str :- String]
+ {:msg (format "Writing catalog for node '%s'" node)
+ :file-suffix ["catalogs" (format "%s.json" node)]
+ :contents catalog-json-str})
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Fact Exporting
+
+(pls/defn-validated facts-for-node
+ :- {String s/Any}
+ "Given a node name, retrieve the catalog for the node."
+ [host :- String
+ port :- s/Int
+ node :- String]
+ (let [{:keys [status body]} (client/get
+ (format
+ "http://%s:%s/v3/nodes/%s/facts"
+ host port node)
+ {:accept :json})]
+ (when (= status 200)
+ (reduce (fn [acc {:strs [name value]}]
+ (assoc acc name value))
+ {} (json/parse-string body)))))
+
+(pls/defn-validated facts->tar :- tar-item
+ "Creates a tar-item map for the collection of facts"
+ [node :- String
+ facts :- {String s/Any}]
+ {:msg (format "Writing facts for node '%s'" node)
+ :file-suffix ["facts" (format "%s.json" node)]
+ :contents (json/generate-pretty-string
+ {"name" node
+ "values" facts})})
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Report Exporting
+
(defn events-for-report-hash
"Given a report hash, returns all events as a vector of maps."
[host port report-hash]
@@ -78,6 +130,36 @@
#(merge % {:resource-events (events-for-report-hash host port (get % :hash))})
(json/parse-string body true))))))
+(pls/defn-validated report->tar :- [tar-item]
+ "Create a tar-item map for the `report`"
+ [node :- String
+ reports :- [{:configuration-version s/Any
+ :start-time s/Any
+ s/Any s/Any}]]
+ (mapv (fn [{:keys [configuration-version start-time] :as report}]
+ {:msg (format "Writing report '%s-%s' for node '%s'" start-time configuration-version node)
+ :file-suffix ["reports" (format "%s-%s-%s.json" node start-time configuration-version)]
+ :contents (json/generate-pretty-string (dissoc report :hash))})
+ reports))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Node Exporting
+
+(pls/defn-validated get-node-data
+ :- {:node String
+ :facts [tar-item]
+ :reports [tar-item]
+ :catalog [tar-item]}
+ "Returns tar-item maps for the reports, facts and catalog of the given
+ node, ready to be written to the filesystem"
+ [host :- String
+ port :- s/Int
+ node :- String]
+ {:node node
+ :facts [(facts->tar node (facts-for-node host port node))]
+ :reports (report->tar node (reports-for-node host port node))
+ :catalog [(catalog->tar node (catalog-for-node host port node))]})
+
(defn get-active-node-names
"Get a list of the names of all active nodes."
[host port]
@@ -92,50 +174,25 @@
(filter #(not (nil? (:catalog_timestamp %)))
(json/parse-string body true))))))
-(def export-metadata
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Metadata Exporting
+
+(pls/defn-validated export-metadata :- tar-item
"Metadata about this export; used during import to ensure version compatibility."
- {:timestamp (now)
- :command-versions
- ;; This is not ideal that we are hard-coding the command version here, but
- ;; in our current architecture I don't believe there is any way to introspect
- ;; on which version of the `replace catalog` matches up with the current
- ;; version of the `catalog` endpoint... or even to query what the latest
- ;; version of a command is. We should improve that.
- {:replace-catalog catalog-version
- :store-report 2}})
-
-(defn get-catalog-for-node
- "Utility function for retrieving catalog data from the PuppetDB web service.
- Returns a map containing the node name and the corresponding catalog; this
- allows us to run this function against multiple nodes in parallel, and still
- be able to identify which node we've retrieved the data for when it returns."
- [host port node]
- {:pre [(string? host)
- (integer? port)
- (string? node)]
- :post [(map? %)
- (contains? % :node)
- (contains? % :catalog)]}
- {:node node
- :catalog (catalog-for-node host port node)})
-
-(defn get-reports-for-node
- "Utility function for retrieving report data from the PuppetDB web service.
- Returns a map containing the node name and all the reports related to the
- node; this allows us to run this function against multiple nodes in parallel,
- and still be able to identify which node we've retrieved the data for when
- it returns."
- [host port node]
- {:pre [(string? host)
- (integer? port)
- (string? node)]
- :post [(map? %)
- (contains? % :node)
- (string? (get % :node))
- (contains? % :reports)
- (seq? (get % :reports))]}
- {:node node
- :reports (reports-for-node host port node)})
+ []
+ {:msg (str "Exporting PuppetDB metadata")
+ :file-suffix [export-metadata-file-name]
+ :contents (json/generate-pretty-string
+ {:timestamp (now)
+ :command-versions
+ ;; This is not ideal that we are hard-coding the command version here, but
+ ;; in our current architecture I don't believe there is any way to introspect
+ ;; on which version of the `replace catalog` matches up with the current
+ ;; version of the `catalog` endpoint... or even to query what the latest
+ ;; version of a command is. We should improve that.
+ {:replace-catalog catalog-version
+ :store-report 2
+ :facts 1}})})
(defn- validate-cli!
[args]
@@ -151,32 +208,25 @@
:puppetlabs.kitchensink.core/cli-error (System/exit 1)
:puppetlabs.kitchensink.core/cli-help (System/exit 0))))))
+(pls/defn-validated add-entry
+ :- nil
+ "Writes the given `tar-item` to `tar-writer` using
+ export-root-dir as the base directory for contents"
+ [tar-writer
+ {:keys [file-suffix contents]} :- tar-item]
+ (archive/add-entry tar-writer "UTF-8"
+ (.getPath (apply io/file export-root-dir file-suffix))
+ contents))
+
(defn -main
[& args]
(let [[{:keys [outfile host port]} _] (validate-cli! args)
- nodes (get-active-node-names host port)
- get-catalog-fn (partial get-catalog-for-node host port)
- get-reports-fn (partial get-reports-for-node host port)]
-;; TODO: do we need to deal with SSL or can we assume this only works over a plaintext port?
+ nodes (get-active-node-names host port)]
+ ;; TODO: do we need to deal with SSL or can we assume this only works over a plaintext port?
(with-open [tar-writer (archive/tarball-writer outfile)]
- (archive/add-entry tar-writer "UTF-8"
- (.getPath (io/file export-root-dir export-metadata-file-name))
- (json/generate-string export-metadata {:pretty true}))
-
- ;; Write out catalogs
- (doseq [node nodes]
- (println (format "Writing catalog for node '%s'" node))
- (archive/add-entry tar-writer "UTF-8"
- (.getPath (io/file export-root-dir "catalogs" (format "%s.json" node)))
- (:catalog (get-catalog-fn node))))
-
- ;; Write out reports
+ (add-entry tar-writer (export-metadata))
(doseq [node nodes
- report (:reports (get-reports-fn node))]
- (let [confversion (get report :configuration-version)
- starttime (get report :start-time)
- reportstr (json/generate-string (dissoc report :hash) {:pretty true})]
- (println (format "Writing report '%s-%s' for node '%s'" starttime confversion node))
- (archive/add-entry tar-writer "UTF-8"
- (.getPath (io/file export-root-dir "reports" (format "%s-%s-%s.json" node starttime confversion)))
- reportstr))))))
+ :let [node-data (get-node-data host port node)]]
+ (doseq [{:keys [msg] :as tar-item} (mapcat node-data [:catalog :reports :facts])]
+ (println msg)
+ (add-entry tar-writer tar-item))))))