From 3e69afdde3c9781e6097a6ee179253e0774ced63 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Wed, 8 Jul 2020 14:51:10 -0400
Subject: [PATCH 01/49] MMT-2231 Add revisions page to published UMM-T page
(#610)
* MMT-2231 Adding revisions page to Tool Records
* MMT-2231 updating display_header_subtitle to work with drafts again
---
app/controllers/tools_controller.rb | 2 +-
app/helpers/manage_metadata_helper.rb | 2 +-
app/views/tools/revisions.html.erb | 119 +++++++++++++++++++
app/views/tools/show.html.erb | 3 +-
config/routes.rb | 1 +
spec/features/services/revision_list_spec.rb | 2 -
spec/features/tools/revision_list_spec.rb | 90 ++++++++++++++
7 files changed, 213 insertions(+), 6 deletions(-)
create mode 100644 app/views/tools/revisions.html.erb
create mode 100644 spec/features/tools/revision_list_spec.rb
diff --git a/app/controllers/tools_controller.rb b/app/controllers/tools_controller.rb
index acfdbda29..d92f31a8e 100644
--- a/app/controllers/tools_controller.rb
+++ b/app/controllers/tools_controller.rb
@@ -2,7 +2,7 @@
class ToolsController < BasePublishedRecordController
include ManageMetadataHelper
- before_action :set_tool, only: [:show, :edit] #, :clone, :destroy, :revisions, :revert, :download_json]
+ before_action :set_tool, only: [:show, :edit, :revisions] #, :clone, :destroy, :revert, :download_json]
before_action :set_schema, only: [:show, :edit] #, :clone, :destroy]
before_action :ensure_supported_version, only: [:show, :edit]
before_action :ensure_correct_provider, only: [:edit] #, :clone, :destroy]
diff --git a/app/helpers/manage_metadata_helper.rb b/app/helpers/manage_metadata_helper.rb
index 43f752208..c420b2c47 100644
--- a/app/helpers/manage_metadata_helper.rb
+++ b/app/helpers/manage_metadata_helper.rb
@@ -80,7 +80,7 @@ def resource_type
end
def display_header_subtitle(metadata, type)
- return unless type.downcase.include?('variable') || type.downcase.include?('service')
+ return unless ['variable', 'service', 'tool'].any? { |type_fragment| type.downcase.include?(type_fragment) }
metadata['LongName'] || 'Long Name Not Provided'
end
diff --git a/app/views/tools/revisions.html.erb b/app/views/tools/revisions.html.erb
new file mode 100644
index 000000000..9c4f2a277
--- /dev/null
+++ b/app/views/tools/revisions.html.erb
@@ -0,0 +1,119 @@
+<% content_for :header_title do %>
+  <%= fetch_entry_id(@tool, 'tools') %>
+  <%= display_header_subtitle(@tool, 'tool') %>
+<% end %>
+
+<% if @errors && !@errors.empty? %>
+  <div class="eui-banner--danger">
+    <ul>
+      <% @errors.each do |error| %>
+        <li>
+          <%= "#{error[:field]}, " if error[:field] %>
+          <%= error[:error] %>
+          <% if error[:request_id] %>
+            Click here to submit feedback
+          <% end %>
+        </li>
+      <% end %>
+    </ul>
+  </div>
+<% end %>
+
+<% if @error %>
+  <div class="eui-banner--danger">
+    <p>
+      This tool could not be updated. You may <%= link_to 'edit', edit_tool_path(revision_id: @revision_id) %> the tool to resolve these issues.
+    </p>
+  </div>
+<% end %>
+
diff --git a/app/views/tools/show.html.erb b/app/views/tools/show.html.erb
index 9cf647aed..6665cf2bb 100644
--- a/app/views/tools/show.html.erb
+++ b/app/views/tools/show.html.erb
@@ -133,8 +133,7 @@
} %>
- <%= link_to 'Revisions', '#', class: 'eui-btn--link disabled' %>
- <%#= link_to "Revisions (#{@revisions.size})", tool_revisions_path, class: 'eui-btn--link disabled' %>
+ <%= link_to "Revisions (#{@revisions.size})", tool_revisions_path, class: 'eui-btn--link' %>
diff --git a/config/routes.rb b/config/routes.rb
index 4b61880ee..6e8cc5c8a 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -104,6 +104,7 @@
get '/services/:id/download_json(/:revision_id)' => 'services#download_json', as: 'download_json_service'
resources :tools, only: [:show, :create, :edit]
+ get '/tools/:id/revisions' => 'tools#revisions', as: 'tool_revisions'
resources :variable_drafts, controller: 'variable_drafts', draft_type: 'VariableDraft' do
member do
diff --git a/spec/features/services/revision_list_spec.rb b/spec/features/services/revision_list_spec.rb
index 80a1a2c18..6fbe57f1d 100644
--- a/spec/features/services/revision_list_spec.rb
+++ b/spec/features/services/revision_list_spec.rb
@@ -1,5 +1,3 @@
-require 'rails_helper'
-
describe 'Service revision list', reset_provider: true, js: true do
context 'when viewing a published service' do
before do
diff --git a/spec/features/tools/revision_list_spec.rb b/spec/features/tools/revision_list_spec.rb
new file mode 100644
index 000000000..b33ba78f1
--- /dev/null
+++ b/spec/features/tools/revision_list_spec.rb
@@ -0,0 +1,90 @@
+describe 'Tool revision list', reset_provider: true, js: true do
+ context 'when viewing a published tool' do
+ before :all do
+ @ingest_response, @concept_response, @native_id = publish_tool_draft(revision_count: 2)
+ end
+
+ # TODO: remove after CMR-6332
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id, 'token')
+
+ raise unless delete_response.success?
+ end
+
+ before do
+ login
+
+ visit tool_path(@ingest_response['concept-id'])
+ end
+
+ it 'displays the number of revisions' do
+ expect(page).to have_content('Revisions (2)')
+ end
+
+ context 'when clicking on the revision link' do
+ before do
+ wait_for_cmr
+ click_on 'Revisions'
+ end
+
+ it 'displays the revision page' do
+ expect(page).to have_content('Revision History')
+ end
+
+ it 'displays the tool long name' do
+ expect(page).to have_content(@concept_response.body['LongName'])
+ end
+
+ it 'displays when the revision was made' do
+ expect(page).to have_content(today_string, count: 2)
+ end
+
+ it 'displays what user made the revision' do
+ expect(page).to have_content('typical', count: 2)
+ end
+
+# TODO: Uncomment in MMT-2233
+# it 'displays the correct phrasing for reverting records' do
+# expect(page).to have_content('Revert to this Revision', count: 1)
+# end
+
+# TODO: Uncomment in MMT-2232
+# context 'when viewing an old revision' do
+# link_text = 'You are viewing an older revision of this tool. Click here to view the latest published version.'
+# before do
+# all('a', text: 'View').last.click
+# end
+#
+# it 'displays a message that the revision is old' do
+# expect(page).to have_link(link_text)
+# end
+#
+# it 'does not display a link to manage collection associations' do
+# expect(page).to have_no_link('Manage Collection Associations')
+# end
+#
+# context 'when clicking the message' do
+# before do
+# click_on link_text
+# end
+#
+# it 'displays the latest revision to the user' do
+# expect(page).to have_no_link(link_text)
+# end
+# end
+# end
+ end
+
+ context 'when searching for the tool' do
+ before do
+ full_search(record_type: 'Tools', keyword: @concept_response.body['LongName'], provider: 'MMT_2')
+ end
+
+ it 'only displays the latest revision' do
+ within '#tool-search-results' do
+ expect(page).to have_content(@concept_response.body['LongName'], count: 1)
+ end
+ end
+ end
+ end
+end
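Note: this patch wires up the revisions route, view, and before_action, but the revisions action body itself is not part of the diff; it is presumably shared via BasePublishedRecordController, as with services and variables. A minimal sketch of what such an inherited action could look like (the breadcrumb call and instance variables are assumptions based on the surrounding codebase, not content of this patch):

    # Hypothetical sketch only; the real action is not shown here.
    def revisions
      # @concept_id and @revisions are expected to be populated by
      # before_action callbacks using CMR's all_revisions search.
      add_breadcrumb 'Revision History', tool_revisions_path(@concept_id)
    end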
From 5e6a0a8af7c9dcf56013d704c34469c8d86a14e4 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Thu, 9 Jul 2020 12:09:14 -0400
Subject: [PATCH 02/49] MMT-2288 Update jquery version that is being packaged
 for the preview gem (#611)
* MMT-2288 updated version for preview gem
* MMT-2288 updating preview gem commit ref.
---
Gemfile | 2 +-
Gemfile.lock | 6 +++---
lib/tasks/local_cmr.rake | 8 ++++----
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/Gemfile b/Gemfile
index f7b917df1..3c70e72a4 100644
--- a/Gemfile
+++ b/Gemfile
@@ -78,7 +78,7 @@ gem 'browser'
# bundle config local.cmr_metadata_preview /path/to/local/git/repository
# make sure to delete the local config when done making changes to merge into master
# bundle config --delete local.cmr_metadata_preview
-gem 'cmr_metadata_preview', git: 'https://git.earthdata.nasa.gov/scm/cmr/cmr_metadata_preview.git', ref: 'fff65949cc6'
+gem 'cmr_metadata_preview', git: 'https://git.earthdata.nasa.gov/scm/cmr/cmr_metadata_preview.git', ref: '1f6ffd54d65'
group :development, :test do
# Call 'byebug' anywhere in the code to stop execution and get a debugger console
diff --git a/Gemfile.lock b/Gemfile.lock
index 61ce28ff4..e2d476f3d 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,9 +1,9 @@
GIT
remote: https://git.earthdata.nasa.gov/scm/cmr/cmr_metadata_preview.git
- revision: fff65949cc62d397b0675e9fe46e962b8cf43228
- ref: fff65949cc6
+ revision: 1f6ffd54d6570f9f920078a84fd51750db3c21ab
+ ref: 1f6ffd54d65
specs:
- cmr_metadata_preview (0.2.2)
+ cmr_metadata_preview (0.2.3)
georuby
rails (~> 5.2.0)
sprockets (< 4.0)
diff --git a/lib/tasks/local_cmr.rake b/lib/tasks/local_cmr.rake
index 4e5689b9b..ed0ff1f54 100644
--- a/lib/tasks/local_cmr.rake
+++ b/lib/tasks/local_cmr.rake
@@ -153,11 +153,11 @@ namespace :cmr do
File.join(Rails.root.to_s, 'vendor', 'assets', 'javascripts', 'eui-1.0.0', 'eui.js')
]
- # TODO: move to version 3 of jquery
- # it is not currently understood how this section works to select jquery
- # currently the preview gem is not running with version 3, but 1
+ # Find the path to jquery
jquery = Rails.application.config.assets.paths.select { |p| p.to_s.include?('jquery-rails') }
- dependencies.unshift(File.join(jquery.first, 'jquery.js')) if jquery.any?
+ # Include a specific file. jquery-rails has files for each major version
+ # stored in the above location
+ dependencies.unshift(File.join(jquery.first, 'jquery3.js')) if jquery.any?
js_to_uglify = dependencies.sort.map do |file|
puts "- Reading #{file}"
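For reference, the selection logic above can be exercised from a Rails console. jquery-rails stores one file per major jQuery version (jquery.js for 1.x, plus jquery2.js and jquery3.js) in a single asset path, which is why pointing at jquery3.js is enough to move the packaged preview assets to jQuery 3:

    # Locate the jquery-rails asset directory the rake task searches.
    jquery = Rails.application.config.assets.paths
                  .select { |p| p.to_s.include?('jquery-rails') }
    # Selecting jquery3.js pins the uglified bundle to jQuery 3.x
    # instead of the 1.x build that bare jquery.js resolves to.
    File.join(jquery.first, 'jquery3.js') if jquery.any?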
From 994cbb17c16c552bff3f23786cbad15d11bd4c96 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Thu, 9 Jul 2020 12:09:32 -0400
Subject: [PATCH 03/49] MMT-2230 (#608)
---
.../change_current_provider.coffee | 2 +
app/controllers/tools_controller.rb | 6 +-
app/helpers/tools_helper.rb | 12 +
app/models/tool_draft.rb | 10 +-
.../_not_current_provider_modal.html.erb | 2 +-
app/views/tools/show.html.erb | 17 +-
config/locales/en.yml | 3 +
config/routes.rb | 1 +
.../create_tool_draft_from_cloning_spec.rb | 62 +++++
spec/features/tools/tool_permissions_spec.rb | 238 ++++++++++++++++++
10 files changed, 333 insertions(+), 20 deletions(-)
create mode 100644 app/helpers/tools_helper.rb
create mode 100644 spec/features/tool_drafts/create_tool_draft_from_cloning_spec.rb
create mode 100644 spec/features/tools/tool_permissions_spec.rb
diff --git a/app/assets/javascripts/change_current_provider.coffee b/app/assets/javascripts/change_current_provider.coffee
index 540b4124c..321d72a24 100644
--- a/app/assets/javascripts/change_current_provider.coffee
+++ b/app/assets/javascripts/change_current_provider.coffee
@@ -74,6 +74,8 @@ $(document).ready ->
"Managing this service's collection associations"
when 'edit-tool'
'Editing this tool'
+ when 'clone-tool'
+ 'Cloning this tool'
$link.data('type', action)
$modal.find('span.provider').text(provider)
diff --git a/app/controllers/tools_controller.rb b/app/controllers/tools_controller.rb
index d92f31a8e..2c5686dbe 100644
--- a/app/controllers/tools_controller.rb
+++ b/app/controllers/tools_controller.rb
@@ -2,10 +2,10 @@
class ToolsController < BasePublishedRecordController
include ManageMetadataHelper
- before_action :set_tool, only: [:show, :edit, :revisions] #, :clone, :destroy, :revert, :download_json]
- before_action :set_schema, only: [:show, :edit] #, :clone, :destroy]
+ before_action :set_tool, only: [:show, :edit, :clone, :revisions] #, :destroy, :revert, :download_json]
+ before_action :set_schema, only: [:show, :edit, :clone] #, :destroy]
before_action :ensure_supported_version, only: [:show, :edit]
- before_action :ensure_correct_provider, only: [:edit] #, :clone, :destroy]
+ before_action :ensure_correct_provider, only: [:edit, :clone] #, :destroy]
before_action :set_preview, only: [:show]
# If clone is not defined like this performing the clone action leads to a `action not found error`
diff --git a/app/helpers/tools_helper.rb b/app/helpers/tools_helper.rb
new file mode 100644
index 000000000..19a5d5e6c
--- /dev/null
+++ b/app/helpers/tools_helper.rb
@@ -0,0 +1,12 @@
+module ToolsHelper
+ def render_change_provider_tool_action_link(tool_action, concept_id, revision_id = nil)
+ case tool_action
+ when 'edit'
+ link_to('Edit Tool', edit_tool_path(concept_id, revision_id: revision_id), class: 'is-invisible', id: 'change-provider-tool-edit')
+ when 'clone'
+ link_to('Clone Tool', clone_tool_path(concept_id, revision_id: revision_id), class: 'is-invisible', id: 'change-provider-tool-clone')
+ when 'delete'
+ link_to('Delete Tool', tool_path(concept_id), method: :delete, class: 'is-invisible', id: 'change-provider-tool-delete')
+ end
+ end
+end
\ No newline at end of file
diff --git a/app/models/tool_draft.rb b/app/models/tool_draft.rb
index 7695bc77f..a5282947c 100644
--- a/app/models/tool_draft.rb
+++ b/app/models/tool_draft.rb
@@ -15,11 +15,11 @@ def create_from_tool(tool, user, native_id)
draft = self.find_or_initialize_by(native_id: native_id)
draft.entry_title = tool['LongName']
draft.short_name = tool['Name']
- # else
- # # Cloned Record
- # draft = self.new
- # tool.delete('Name')
- # tool.delete('LongName')
+ else
+ # Cloned Record
+ draft = self.new
+ tool.delete('Name')
+ tool.delete('LongName')
end
draft.set_user_and_provider(user)
diff --git a/app/views/shared/_not_current_provider_modal.html.erb b/app/views/shared/_not_current_provider_modal.html.erb
index 3843bc179..aeb5182bc 100644
--- a/app/views/shared/_not_current_provider_modal.html.erb
+++ b/app/views/shared/_not_current_provider_modal.html.erb
@@ -33,7 +33,7 @@
<%= link_to 'Yes', service_collection_associations_path(options[:concept_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-manage-service-associations-link' %>
<% elsif options[:tool] %>
<%= link_to 'Yes', edit_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-edit-tool-link' %>
- <%#= link_to 'Yes', clone_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-clone-tool-link' %>
+ <%= link_to 'Yes', clone_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-clone-tool-link' %>
<%#= link_to 'Yes', tool_path(options[:concept_id]), method: :delete, class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-delete-tool-link' %>
<% end %>
diff --git a/app/views/tools/show.html.erb b/app/views/tools/show.html.erb
index 6665cf2bb..14240393e 100644
--- a/app/views/tools/show.html.erb
+++ b/app/views/tools/show.html.erb
@@ -23,9 +23,7 @@
"#", id: "change-current-provider-banner-link",
data: { "provider": @provider_id, action_link: "change-provider-tool-#{@record_action}" }) %>
- <%# TODO: this method does not exist yet. It should be created and used when %>
- <%# additional actions are added to published Tool records %>
- <%#= render_change_provider_tool_action_link(@record_action, @concept_id, @revision_id) %>
+ <%= render_change_provider_tool_action_link(@record_action, @concept_id, @revision_id) %>
<% end %>
@@ -94,14 +92,11 @@
<% end %>
<% end %>
- <%# TODO: All links commented out and disabled links added for MMT-2238 %>
- <%# links should be re-enabled with the appropriate ticket %>
- <%= link_to 'Clone Tool Record', '#', class: 'eui-btn--link bar-after disabled' %>
- <%# if current_provider?(@provider_id) %>
- <%#= link_to 'Clone Tool Record', clone_tool_path(revision_id: @revision_id), class: 'eui-btn--link bar-after' %>
- <%# elsif available_provider?(@provider_id) %>
- <%#= link_to 'Clone Tool Record', '#not-current-provider-modal', class: 'display-modal not-current-provider eui-btn--link bar-after', data: { 'provider': @provider_id, record_action: 'clone-tool' } %>
- <%# end %>
+ <% if current_provider?(@provider_id) %>
+ <%= link_to 'Clone Tool Record', clone_tool_path(revision_id: @revision_id), class: 'eui-btn--link bar-after' %>
+ <% elsif available_provider?(@provider_id) %>
+ <%= link_to 'Clone Tool Record', '#not-current-provider-modal', class: 'display-modal not-current-provider eui-btn--link bar-after', data: { 'provider': @provider_id, record_action: 'clone-tool' } %>
+ <% end %>
<%= link_to 'Download JSON', '#', class: 'eui-btn--link disabled' %>
<%#= link_to 'Download JSON', download_json_tool_path(@concept_id, revision_id: @revision_id), class: 'eui-btn--link', target: '_blank' %>
diff --git a/config/locales/en.yml b/config/locales/en.yml
index bf9d5b6cb..d420d9298 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -198,6 +198,9 @@ en:
flash:
success: 'Tool Draft Published Successfully!'
error: 'Tool Draft was not published successfully'
+ clone:
+ flash:
+ notice: 'Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.'
collection_associations:
destroy:
flash:
diff --git a/config/routes.rb b/config/routes.rb
index 6e8cc5c8a..ce35fbe4d 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -104,6 +104,7 @@
get '/services/:id/download_json(/:revision_id)' => 'services#download_json', as: 'download_json_service'
resources :tools, only: [:show, :create, :edit]
+ get '/tools/:id/clone' => 'tools#clone', as: 'clone_tool'
get '/tools/:id/revisions' => 'tools#revisions', as: 'tool_revisions'
resources :variable_drafts, controller: 'variable_drafts', draft_type: 'VariableDraft' do
diff --git a/spec/features/tool_drafts/create_tool_draft_from_cloning_spec.rb b/spec/features/tool_drafts/create_tool_draft_from_cloning_spec.rb
new file mode 100644
index 000000000..76a02c85c
--- /dev/null
+++ b/spec/features/tool_drafts/create_tool_draft_from_cloning_spec.rb
@@ -0,0 +1,62 @@
+describe 'Creating a tool draft from cloning a tool', reset_provider: true, js: true do
+ before :all do
+ @ingest_response, _concept_response, @native_id = publish_tool_draft
+ end
+
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id, 'token')
+
+ raise unless delete_response.success?
+ end
+
+ context 'when cloning a published tool' do
+ before do
+ login
+
+ visit tool_path(@ingest_response['concept-id'])
+
+ click_on 'Clone Tool Record'
+ end
+
+ it 'displays the draft preview page' do
+ within '.eui-breadcrumbs' do
+ expect(page).to have_content('Tool Drafts')
+ end
+
+ expect(page).to have_content('Publish Tool Draft')
+ expect(page).to have_content('Delete Tool Draft')
+ expect(page).to have_content('Metadata Fields')
+ expect(page).to have_content('Tool Information')
+ end
+
+ it 'removes the Name and Long Name from the metadata' do
+ within '#tool_draft_draft_name_preview' do
+ expect(page).to have_css('p', text: 'No value for Name provided.')
+ end
+
+ within '#tool_draft_draft_long_name_preview' do
+ expect(page).to have_css('p', text: 'No value for Long Name provided.')
+ end
+ end
+
+ it 'creates a new native id for the draft' do
+ draft = ToolDraft.last
+ expect(draft.native_id).to eq("mmt_tool_#{draft.id}")
+ end
+
+ it 'displays a message that the draft needs a unique Name' do
+ expect(page).to have_content('Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.')
+ end
+
+ context 'when clicking the banner message to enter a new Name' do
+ before do
+ click_on 'Click here to enter a new Name and Long Name.'
+ end
+
+ it 'displays the empty Name and Long Name fields' do
+ expect(page).to have_field('Name', with: '')
+ expect(page).to have_field('Long Name', with: '')
+ end
+ end
+ end
+end
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
new file mode 100644
index 000000000..f01413565
--- /dev/null
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -0,0 +1,238 @@
+describe 'Tools permissions', reset_provider: true, js: true do
+ let(:modal_text) { 'requires you change your provider context to MMT_2' }
+
+ context 'when viewing a tool' do
+ before do
+ login
+ end
+
+ context "when the tool's provider is in the users available providers" do
+ before :all do
+ @ingested_tool, _concept_response, @native_id_1 = publish_tool_draft
+# @ingested_tool_for_delete_modal, _concept_response, _native_id_2 = publish_tool_draft
+ end
+
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id_1, 'token')
+ # Second tool should be deleted in the delete test
+
+ raise unless delete_response.success?
+ end
+
+ before do
+ login(provider: 'MMT_1', providers: %w(MMT_1 MMT_2))
+
+ visit tool_path(@ingested_tool['concept-id'])
+ end
+
+ it 'displays the action links' do
+ expect(page).to have_link('Edit Tool Record')
+ expect(page).to have_link('Clone Tool Record')
+ expect(page).to have_link('Delete Tool Record')
+ end
+
+ context 'when clicking the edit link' do
+ before do
+ click_on 'Edit Tool Record'
+ end
+
+ it 'displays a modal informing the user they need to switch providers' do
+ expect(page).to have_content("Editing this tool #{modal_text}")
+ end
+
+ context 'when clicking Yes' do
+ before do
+ # click_on 'Yes'
+ find('.not-current-provider-link').click
+ wait_for_jQuery
+ end
+
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+
+ it 'creates a draft from the tool' do
+ expect(page).to have_content('Tool Draft Created Successfully!')
+ expect(Draft.where(provider_id: 'MMT_2').size).to eq(1)
+ end
+ end
+ end
+
+ context 'when clicking the clone link' do
+ before do
+ click_on 'Clone Tool Record'
+ end
+
+ it 'displays a modal informing the user they need to switch providers' do
+ expect(page).to have_content("Cloning this tool #{modal_text}")
+ end
+
+ context 'when clicking Yes' do
+ before do
+ find('.not-current-provider-link').click
+ wait_for_jQuery
+ end
+
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+
+ it 'creates a draft from the tool' do
+ expect(page).to have_content('Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.')
+ expect(Draft.where(provider_id: 'MMT_2').size).to eq(1)
+ end
+ end
+ end
+
+# TODO: Uncomment in MMT-2229
+# context 'when clicking the delete link' do
+# context 'when the tool has no associated collections' do
+# before do
+# visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
+#
+# click_on 'Delete Tool Record'
+# end
+#
+# it 'displays a modal informing the user they need to switch providers' do
+# expect(page).to have_content("Deleting this tool #{modal_text}")
+# end
+#
+# it 'does not display a message about collection associations that will also be deleted' do
+# expect(page).to have_no_content('This tool is associated with')
+# expect(page).to have_no_content('collections. Deleting this tool will also delete the collection associations')
+# end
+# end
+#
+# context 'when deleting the tool' do
+# before do
+# ingested_tool_to_delete, _concept_response = publish_tool_draft
+#
+# visit tool_path(ingested_tool_to_delete['concept-id'])
+#
+# click_on 'Delete Tool Record'
+#
+# find('.not-current-provider-link').click
+# wait_for_jQuery
+# end
+#
+# it 'switches the provider context' do
+# expect(User.first.provider_id).to eq('MMT_2')
+# end
+#
+# it 'deletes the record' do
+# expect(page).to have_content('Tool Deleted Successfully!')
+# end
+# end
+# end
+
+ context 'when trying to visit the action paths directly' do
+ context 'when visiting the edit path directly' do
+ before do
+ edit_link = page.current_path + '/edit'
+ visit edit_link
+ end
+
+ it 'displays warning banner link to change provider' do
+ expect(page).to have_css('.eui-banner--warn')
+ expect(page).to have_content('You need to change your current provider to edit this tool')
+ end
+
+ context 'when clicking the warning banner link' do
+ before do
+ click_link('You need to change your current provider to edit this tool')
+ wait_for_jQuery
+ end
+
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+
+ it 'creates a draft from the tool' do
+ expect(page).to have_content('Tool Draft Created Successfully!')
+ expect(Draft.where(provider_id: 'MMT_2').size).to eq(1)
+ end
+ end
+ end
+
+ context 'when visiting the clone path directly' do
+ before do
+ clone_link = page.current_path + '/clone'
+ visit clone_link
+ end
+
+ it 'displays warning banner link to change provider' do
+ expect(page).to have_css('.eui-banner--warn')
+ expect(page).to have_content('You need to change your current provider to clone this tool')
+ end
+
+ context 'when clicking the warning banner link' do
+ before do
+ click_link('You need to change your current provider to clone this tool')
+ wait_for_jQuery
+ end
+
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+
+ it 'creates a draft from the tool' do
+ expect(page).to have_content('Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.')
+ expect(Draft.where(provider_id: 'MMT_2').size).to eq(1)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when the tools provider is not in the users available providers' do
+ before do
+ @ingested_not_available_provider_tool, _concept_response = publish_tool_draft(provider_id: 'SEDAC')
+
+ visit tool_path(@ingested_not_available_provider_tool['concept-id'])
+ end
+
+ it 'does not display the action links' do
+ expect(page).to have_no_link('Edit Tool Record')
+ expect(page).to have_no_link('Clone Tool Record')
+# TODO: Uncomment in MMT-2229
+# expect(page).to have_no_link('Delete Tool Record')
+ end
+
+ context 'when trying to visit the action paths directly' do
+ context 'when visiting the edit path directly' do
+ before do
+ edit_link = page.current_path + '/edit'
+ visit edit_link
+ end
+
+ it 'displays the no permissions banner message' do
+ expect(page).to have_css('.eui-banner--danger')
+ expect(page).to have_content("You don't have the appropriate permissions to edit this tool")
+ end
+
+ it 'displays the Access Denied message' do
+ expect(page).to have_content('Access Denied')
+ expect(page).to have_content('It appears you do not have access to edit this content.')
+ end
+ end
+
+ context 'when visiting the clone path directly' do
+ before do
+ clone_link = page.current_path + '/clone'
+ visit clone_link
+ end
+
+ it 'displays the no permissions banner message' do
+ expect(page).to have_css('.eui-banner--danger')
+ expect(page).to have_content("You don't have the appropriate permissions to clone this tool")
+ end
+
+ it 'displays the Access Denied message' do
+ expect(page).to have_content('Access Denied')
+ expect(page).to have_content('It appears you do not have access to clone this content.')
+ end
+ end
+ end
+ end
+ end
+end
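The tool_draft.rb change above is the core of cloning: create_from_tool appears to treat a present native_id as an edit of the existing record and a nil native_id as a clone, in which case Name and LongName are stripped so the provider-unique name check forces the user to enter new values (and, per the spec, a fresh "mmt_tool_<id>" native_id is generated). A sketch of the two call sites under that assumption; the clone action itself is inherited and not shown in this diff:

    # Editing: reuse the native_id so the existing draft is found/updated.
    draft = ToolDraft.create_from_tool(tool, current_user, native_id)
    # Cloning: pass nil so a brand-new draft is created with Name and
    # LongName removed, forcing the user to supply unique values.
    clone = ToolDraft.create_from_tool(tool, current_user, nil)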
From 31b1c59fccbe3e383fb792ac013aade001011eb9 Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Fri, 10 Jul 2020 04:08:58 -0400
Subject: [PATCH 04/49] MMT-2229 Adding delete for published tools
---
.../change_current_provider.coffee | 2 +
app/controllers/tools_controller.rb | 6 +-
app/views/tools/show.html.erb | 17 ++--
config/locales/en.yml | 4 +
config/routes.rb | 2 +-
spec/features/tools/delete_tool_spec.rb | 96 +++++++++++++++++++
spec/features/tools/tool_permissions_spec.rb | 88 +++++++++--------
7 files changed, 162 insertions(+), 53 deletions(-)
create mode 100644 spec/features/tools/delete_tool_spec.rb
diff --git a/app/assets/javascripts/change_current_provider.coffee b/app/assets/javascripts/change_current_provider.coffee
index 321d72a24..08298d76b 100644
--- a/app/assets/javascripts/change_current_provider.coffee
+++ b/app/assets/javascripts/change_current_provider.coffee
@@ -76,6 +76,8 @@ $(document).ready ->
'Editing this tool'
when 'clone-tool'
'Cloning this tool'
+ when 'delete-tool'
+ 'Deleting this tool'
$link.data('type', action)
$modal.find('span.provider').text(provider)
diff --git a/app/controllers/tools_controller.rb b/app/controllers/tools_controller.rb
index 2c5686dbe..5819480be 100644
--- a/app/controllers/tools_controller.rb
+++ b/app/controllers/tools_controller.rb
@@ -2,10 +2,10 @@
class ToolsController < BasePublishedRecordController
include ManageMetadataHelper
- before_action :set_tool, only: [:show, :edit, :clone, :revisions] #, :destroy, :revert, :download_json]
- before_action :set_schema, only: [:show, :edit, :clone] #, :destroy]
+ before_action :set_tool, only: [:show, :edit, :clone, :destroy, :revisions] #, :revert, :download_json]
+ before_action :set_schema, only: [:show, :edit, :clone, :destroy]
before_action :ensure_supported_version, only: [:show, :edit]
- before_action :ensure_correct_provider, only: [:edit, :clone] #, :destroy]
+ before_action :ensure_correct_provider, only: [:edit, :clone, :destroy]
before_action :set_preview, only: [:show]
# If clone is not defined like this performing the clone action leads to a `action not found error`
diff --git a/app/views/tools/show.html.erb b/app/views/tools/show.html.erb
index 14240393e..19ed43cde 100644
--- a/app/views/tools/show.html.erb
+++ b/app/views/tools/show.html.erb
@@ -101,22 +101,21 @@
<%= link_to 'Download JSON', '#', class: 'eui-btn--link disabled' %>
<%#= link_to 'Download JSON', download_json_tool_path(@concept_id, revision_id: @revision_id), class: 'eui-btn--link', target: '_blank' %>
- <%= link_to 'Delete Tool Record', '#', class: 'display-modal delete-tool eui-btn--link bar-before disabled' %>
- <%# if current_provider?(@provider_id) %>
- <%#= link_to 'Delete Tool Record', "#delete-record-modal", class: 'display-modal delete-tool eui-btn--link bar-before' %>
- <%# elsif available_provider?(@provider_id) %>
- <%#= link_to 'Delete Tool Record', '#not-current-provider-modal', class: 'display-modal not-current-provider eui-btn--link bar-before', data: { 'provider': @provider_id, record_action: 'delete-tool', num_associated_collections: "#{@num_associated_collections}" } %>
- <%# end %>
+ <% if current_provider?(@provider_id) %>
+ <%= link_to 'Delete Tool Record', "#delete-record-modal", class: 'display-modal delete-tool eui-btn--link bar-before' %>
+ <% elsif available_provider?(@provider_id) %>
+ <%= link_to 'Delete Tool Record', '#not-current-provider-modal', class: 'display-modal not-current-provider eui-btn--link bar-before', data: { 'provider': @provider_id, record_action: 'delete-tool' } %>
+ <% end %>
-
+
<%= render partial: 'shared/not_current_provider_modal', locals: {
options: {
tool: @tool,
diff --git a/config/locales/en.yml b/config/locales/en.yml
index d420d9298..3ef4ec0cb 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -198,6 +198,10 @@ en:
flash:
success: 'Tool Draft Published Successfully!'
error: 'Tool Draft was not published successfully'
+ destroy:
+ flash:
+ success: 'Tool Deleted Successfully!'
+ error: 'Tool was not deleted successfully'
clone:
flash:
notice: 'Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.'
diff --git a/config/routes.rb b/config/routes.rb
index ce35fbe4d..ab8ae9839 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -103,7 +103,7 @@
get '/services/:id/clone' => 'services#clone', as: 'clone_service'
get '/services/:id/download_json(/:revision_id)' => 'services#download_json', as: 'download_json_service'
- resources :tools, only: [:show, :create, :edit]
+ resources :tools, only: [:show, :create, :edit, :destroy]
get '/tools/:id/clone' => 'tools#clone', as: 'clone_tool'
get '/tools/:id/revisions' => 'tools#revisions', as: 'tool_revisions'
diff --git a/spec/features/tools/delete_tool_spec.rb b/spec/features/tools/delete_tool_spec.rb
new file mode 100644
index 000000000..50c152752
--- /dev/null
+++ b/spec/features/tools/delete_tool_spec.rb
@@ -0,0 +1,96 @@
+require 'rails_helper'
+
+describe 'Delete tool', reset_provider: true, js: true do
+ before :all do
+ @ingested_tool, _concept_response, _native_id_1 = publish_tool_draft
+
+ @ingested_tool_for_delete_messages, _concept_response, @native_id_2 = publish_tool_draft
+ end
+
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id_2, 'token')
+ # First tool should be deleted in the delete test
+
+ raise unless delete_response.success?
+ end
+
+ before do
+ login
+ end
+
+ context 'when viewing a published tool' do
+ before do
+ visit tool_path(@ingested_tool['concept-id'])
+ end
+
+ it 'displays a delete link' do
+ expect(page).to have_content('Delete Tool Record')
+ end
+
+ context 'when clicking the delete link' do
+ before do
+ click_on 'Delete Tool Record'
+ end
+
+ it 'displays a confirmation modal' do
+ expect(page).to have_content('Are you sure you want to delete this tool record?')
+ end
+
+ context 'when clicking Yes' do
+ before do
+ within '#delete-record-modal' do
+ click_on 'Yes'
+ end
+ end
+
+ it 'redirects to the revisions page and displays a confirmation message' do
+ expect(page).to have_content('Revision History')
+
+ expect(page).to have_content('Tool Deleted Successfully!')
+ end
+ end
+ end
+ end
+
+ context 'when deleting the tool will fail' do
+ before do
+ visit tool_path(@ingested_tool_for_delete_messages['concept-id'])
+ end
+
+ context 'when CMR provides a message' do
+ before do
+ error_body = '{"errors": ["You do not have permission to perform that action."]}'
+ error_response = Cmr::Response.new(Faraday::Response.new(status: 401, body: JSON.parse(error_body), response_headers: {}))
+ allow_any_instance_of(Cmr::CmrClient).to receive(:delete_tool).and_return(error_response)
+
+ click_on 'Delete Tool Record'
+
+ within '#delete-record-modal' do
+ click_on 'Yes'
+ end
+ end
+
+ it 'displays the CMR error message' do
+ expect(page).to have_css('.eui-banner--danger', text: 'You do not have permission to perform that action.')
+ end
+ end
+
+ context 'when CMR does not provide a message' do
+ before do
+ error_body = '{"message": "useless message"}'
+ error_response = Cmr::Response.new(Faraday::Response.new(status: 401, body: JSON.parse(error_body), response_headers: {}))
+ allow_any_instance_of(Cmr::CmrClient).to receive(:delete_tool).and_return(error_response)
+
+ click_on 'Delete Tool Record'
+
+ within '#delete-record-modal' do
+ click_on 'Yes'
+ end
+ end
+
+ it 'displays the CMR error message' do
+ expect(page).to have_css('.eui-banner--danger', text: 'Tool was not deleted successfully')
+ end
+ end
+ end
+end
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index f01413565..0075c67f4 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -9,7 +9,7 @@
context "when the tool's provider is in the users available providers" do
before :all do
@ingested_tool, _concept_response, @native_id_1 = publish_tool_draft
-# @ingested_tool_for_delete_modal, _concept_response, _native_id_2 = publish_tool_draft
+ @ingested_tool_for_delete_modal, _concept_response, _native_id_2 = publish_tool_draft
end
after :all do
@@ -85,45 +85,53 @@
end
# TODO: Uncomment in MMT-2229
-# context 'when clicking the delete link' do
-# context 'when the tool has no associated collections' do
-# before do
-# visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
-#
-# click_on 'Delete Tool Record'
-# end
-#
-# it 'displays a modal informing the user they need to switch providers' do
-# expect(page).to have_content("Deleting this tool #{modal_text}")
-# end
-#
-# it 'does not display a message about collection associations that will also be deleted' do
-# expect(page).to have_no_content('This tool is associated with')
-# expect(page).to have_no_content('collections. Deleting this tool will also delete the collection associations')
-# end
-# end
-#
-# context 'when deleting the tool' do
-# before do
-# ingested_tool_to_delete, _concept_response = publish_tool_draft
-#
-# visit tool_path(ingested_tool_to_delete['concept-id'])
-#
-# click_on 'Delete Tool Record'
-#
-# find('.not-current-provider-link').click
-# wait_for_jQuery
-# end
-#
-# it 'switches the provider context' do
-# expect(User.first.provider_id).to eq('MMT_2')
-# end
-#
-# it 'deletes the record' do
-# expect(page).to have_content('Tool Deleted Successfully!')
-# end
-# end
-# end
+ context 'when clicking the delete link' do
+
+ before do
+ login(provider: 'MMT_1', providers: %w(MMT_1 MMT_2))
+ visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
+
+ click_on 'Delete Tool Record'
+ end
+
+ it 'displays a modal informing the user they need to switch providers' do
+ expect(page).to have_content("Deleting this tool #{modal_text}")
+ end
+
+ context 'when clicking Yes' do
+ before do
+ find('.not-current-provider-link').click
+ wait_for_jQuery
+ end
+
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+
+ it 'deletes the record' do
+ expect(page).to have_content('Tool Deleted Successfully!')
+ end
+ end
+
+ #context 'when deleting the tool' do
+ # before do
+ # visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
+
+ # click_on 'Delete Tool Record'
+
+ # find('.not-current-provider-link').click
+ # wait_for_jQuery
+ # end
+
+ # it 'switches the provider context' do
+ # expect(User.first.provider_id).to eq('MMT_2')
+ # end
+
+ # it 'deletes the record' do
+ # expect(page).to have_content('Tool Deleted Successfully!')
+ # end
+ #end
+ end
context 'when trying to visit the action paths directly' do
context 'when visiting the edit path directly' do
From 986e2717e8af8c42bafc3fa54809401a3279d8fe Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 10 Jul 2020 10:13:18 -0400
Subject: [PATCH 05/49] MMT-2313: Added loss reporting route, added a display
action to the collections controller
---
app/controllers/collections_controller.rb | 8 ++++++--
config/routes.rb | 1 +
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 76a925f80..6df58c4b6 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -23,6 +23,10 @@ def show
end
end
+ def loss_report
+ render 'errors/internal_server_error'
+ end
+
def edit
draft = CollectionDraft.create_from_collection(@collection, current_user, @native_id)
Rails.logger.info("Audit Log: Collection Draft for #{draft.entry_title} was created by #{current_user.urs_uid} in provider #{current_user.provider_id}")
@@ -149,7 +153,7 @@ def set_collection
@download_xml_options.each do |download_option|
# gsub here is needed because of the iso-smap and application/iso:smap+xml format options
if native_format.gsub(':','').include?(download_option[:format].gsub('-', ''))
- download_option[:title].concat(' (Native)')
+ download_option[:title].concat(' (Native)')
@download_xml_options.delete(download_option)
@download_xml_options.unshift(download_option)
break
@@ -201,7 +205,7 @@ def proposal_mode_enabled?
super
end
end
-
+
def select_revision
selected = @revisions.select {|r| r.fetch('meta')['revision-id'] && r.fetch('meta')['deleted'] == false && r.fetch('meta')['revision-id'].to_i < @revision_id.to_i}.first
selected.blank? ? nil : selected.fetch('meta')['revision-id']
diff --git a/config/routes.rb b/config/routes.rb
index 4b61880ee..23cd27dff 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -69,6 +69,7 @@
get '/collections/:id/download_xml/:format(/:revision_id)' => 'collections#download_xml', as: 'download_collection_xml'
get '/collections/:id/create_delete_proposal' => 'collections#create_delete_proposal', as: 'create_delete_proposal_collection'
get '/collections/:id/create_update_proposal' => 'collections#create_update_proposal', as: 'create_update_proposal_collection'
+ get '/collections/:id/loss' => 'collections#loss_report'
resource :variable_generation_processes_search, only: [:new]
From 22a9fb8a916127df1810197bce3778f0f1fbfd00 Mon Sep 17 00:00:00 2001
From: Ryan Miller
Date: Fri, 10 Jul 2020 14:07:24 -0400
Subject: [PATCH 06/49] MMT-2229 Updating not current provider modal
---
.../_not_current_provider_modal.html.erb | 2 +-
spec/features/tools/tool_permissions_spec.rb | 34 +++++++++----------
2 files changed, 17 insertions(+), 19 deletions(-)
diff --git a/app/views/shared/_not_current_provider_modal.html.erb b/app/views/shared/_not_current_provider_modal.html.erb
index aeb5182bc..6f828700c 100644
--- a/app/views/shared/_not_current_provider_modal.html.erb
+++ b/app/views/shared/_not_current_provider_modal.html.erb
@@ -34,7 +34,7 @@
<% elsif options[:tool] %>
<%= link_to 'Yes', edit_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-edit-tool-link' %>
<%= link_to 'Yes', clone_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-clone-tool-link' %>
- <%#= link_to 'Yes', tool_path(options[:concept_id]), method: :delete, class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-delete-tool-link' %>
+ <%= link_to 'Yes', tool_path(options[:concept_id]), method: :delete, class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-delete-tool-link' %>
<% end %>
<% if options[:draft] %>
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index 0075c67f4..993d6fe8c 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -9,12 +9,10 @@
context "when the tool's provider is in the users available providers" do
before :all do
@ingested_tool, _concept_response, @native_id_1 = publish_tool_draft
- @ingested_tool_for_delete_modal, _concept_response, _native_id_2 = publish_tool_draft
end
after :all do
delete_response = cmr_client.delete_tool('MMT_2', @native_id_1, 'token')
- # Second tool should be deleted in the delete test
raise unless delete_response.success?
end
@@ -86,8 +84,8 @@
# TODO: Uncomment in MMT-2229
context 'when clicking the delete link' do
-
before do
+ @ingested_tool_for_delete_modal, _concept_response, @native_id_2 = publish_tool_draft
login(provider: 'MMT_1', providers: %w(MMT_1 MMT_2))
visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
@@ -107,30 +105,30 @@
it 'switches the provider context' do
expect(User.first.provider_id).to eq('MMT_2')
end
-
+
it 'deletes the record' do
expect(page).to have_content('Tool Deleted Successfully!')
end
end
- #context 'when deleting the tool' do
- # before do
- # visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
+ context 'when deleting the tool' do
+ before do
+ visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
- # click_on 'Delete Tool Record'
+ click_on 'Delete Tool Record'
- # find('.not-current-provider-link').click
- # wait_for_jQuery
- # end
+ find('.not-current-provider-link').click
+ wait_for_jQuery
+ end
- # it 'switches the provider context' do
- # expect(User.first.provider_id).to eq('MMT_2')
- # end
+ it 'switches the provider context' do
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
- # it 'deletes the record' do
- # expect(page).to have_content('Tool Deleted Successfully!')
- # end
- #end
+ it 'deletes the record' do
+ expect(page).to have_content('Tool Deleted Successfully!')
+ end
+ end
end
context 'when trying to visit the action paths directly' do
From db5af9598498f3a903f81bd0ef5053b2f392d48a Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Sun, 12 Jul 2020 22:24:15 -0400
Subject: [PATCH 07/49] MMT-2229 Fixing the tool_permissions_spec
---
.../_not_current_provider_modal.html.erb | 2 +-
spec/features/tools/tool_permissions_spec.rb | 26 +------------------
2 files changed, 2 insertions(+), 26 deletions(-)
diff --git a/app/views/shared/_not_current_provider_modal.html.erb b/app/views/shared/_not_current_provider_modal.html.erb
index aeb5182bc..6f828700c 100644
--- a/app/views/shared/_not_current_provider_modal.html.erb
+++ b/app/views/shared/_not_current_provider_modal.html.erb
@@ -34,7 +34,7 @@
<% elsif options[:tool] %>
<%= link_to 'Yes', edit_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-edit-tool-link' %>
<%= link_to 'Yes', clone_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-clone-tool-link' %>
- <%#= link_to 'Yes', tool_path(options[:concept_id]), method: :delete, class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-delete-tool-link' %>
+ <%= link_to 'Yes', tool_path(options[:concept_id]), method: :delete, class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-delete-tool-link' %>
<% end %>
<% if options[:draft] %>
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index 0075c67f4..1ccd1edf8 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -84,9 +84,7 @@
end
end
-# TODO: Uncomment in MMT-2229
context 'when clicking the delete link' do
-
before do
login(provider: 'MMT_1', providers: %w(MMT_1 MMT_2))
visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
@@ -104,33 +102,11 @@
wait_for_jQuery
end
- it 'switches the provider context' do
+ it 'switches the provider context and deletes the record' do
expect(User.first.provider_id).to eq('MMT_2')
- end
-
- it 'deletes the record' do
expect(page).to have_content('Tool Deleted Successfully!')
end
end
-
- #context 'when deleting the tool' do
- # before do
- # visit tool_path(@ingested_tool_for_delete_modal['concept-id'])
-
- # click_on 'Delete Tool Record'
-
- # find('.not-current-provider-link').click
- # wait_for_jQuery
- # end
-
- # it 'switches the provider context' do
- # expect(User.first.provider_id).to eq('MMT_2')
- # end
-
- # it 'deletes the record' do
- # expect(page).to have_content('Tool Deleted Successfully!')
- # end
- #end
end
context 'when trying to visit the action paths directly' do
From daaa4cfd232cd14459383f30a5840a27d62926f1 Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Sun, 12 Jul 2020 22:45:32 -0400
Subject: [PATCH 08/49] Fixing spacing for tool_permissions_spec.rb
---
spec/features/tools/tool_permissions_spec.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index cc77606de..dde06078b 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -221,4 +221,4 @@
end
end
end
-end
\ No newline at end of file
+end
From f58c6ad91a9c592032ef994423e44b676a277f2e Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Mon, 13 Jul 2020 07:51:45 -0400
Subject: [PATCH 09/49] MMT-2234 (#607)
adding download json for tools
---
app/controllers/tools_controller.rb | 2 +-
app/views/tools/show.html.erb | 3 +-
config/routes.rb | 1 +
.../collections/downloading_xml_spec.rb | 16 +++----
spec/features/services/download_json_spec.rb | 18 ++++++++
spec/features/tools/download_json_spec.rb | 42 +++++++++++++++++++
spec/features/variables/download_json_spec.rb | 18 ++++++++
7 files changed, 90 insertions(+), 10 deletions(-)
create mode 100644 spec/features/tools/download_json_spec.rb
diff --git a/app/controllers/tools_controller.rb b/app/controllers/tools_controller.rb
index 2c5686dbe..6402e8db0 100644
--- a/app/controllers/tools_controller.rb
+++ b/app/controllers/tools_controller.rb
@@ -2,7 +2,7 @@
class ToolsController < BasePublishedRecordController
include ManageMetadataHelper
- before_action :set_tool, only: [:show, :edit, :clone, :revisions] #, :destroy, :revert, :download_json]
+ before_action :set_tool, only: [:show, :edit, :clone, :revisions, :download_json] #, :destroy, :revert]
before_action :set_schema, only: [:show, :edit, :clone] #, :destroy]
before_action :ensure_supported_version, only: [:show, :edit]
before_action :ensure_correct_provider, only: [:edit, :clone] #, :destroy]
diff --git a/app/views/tools/show.html.erb b/app/views/tools/show.html.erb
index 14240393e..72f91d48c 100644
--- a/app/views/tools/show.html.erb
+++ b/app/views/tools/show.html.erb
@@ -98,8 +98,7 @@
<%= link_to 'Clone Tool Record', '#not-current-provider-modal', class: 'display-modal not-current-provider eui-btn--link bar-after', data: { 'provider': @provider_id, record_action: 'clone-tool' } %>
<% end %>
- <%= link_to 'Download JSON', '#', class: 'eui-btn--link disabled' %>
- <%#= link_to 'Download JSON', download_json_tool_path(@concept_id, revision_id: @revision_id), class: 'eui-btn--link', target: '_blank' %>
+ <%= link_to 'Download JSON', download_json_tool_path(@concept_id, revision_id: @revision_id), class: 'eui-btn--link', target: '_blank' %>
<%= link_to 'Delete Tool Record', '#', class: 'display-modal delete-tool eui-btn--link bar-before disabled' %>
<%# if current_provider?(@provider_id) %>
diff --git a/config/routes.rb b/config/routes.rb
index ce35fbe4d..619d95aff 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -106,6 +106,7 @@
resources :tools, only: [:show, :create, :edit]
get '/tools/:id/clone' => 'tools#clone', as: 'clone_tool'
get '/tools/:id/revisions' => 'tools#revisions', as: 'tool_revisions'
+ get '/tools/:id/download_json(/:revision_id)' => 'tools#download_json', as: 'download_json_tool'
resources :variable_drafts, controller: 'variable_drafts', draft_type: 'VariableDraft' do
member do
diff --git a/spec/features/collections/downloading_xml_spec.rb b/spec/features/collections/downloading_xml_spec.rb
index 81ed7e69b..60107c477 100644
--- a/spec/features/collections/downloading_xml_spec.rb
+++ b/spec/features/collections/downloading_xml_spec.rb
@@ -26,16 +26,17 @@
before do
@file = "#{Rails.root}/#{@concept_id}.echo10"
click_on 'ECHO 10'
- end
- after do
# Seems to need a brief (>0.01) pause to actually find the file.
sleep(0.1)
+ end
+
+ after do
FileUtils.rm @file if File.exist?(@file)
end
it 'downloads the file' do
- expect(File.exist?(@file))
+ expect(File.exist?(@file)).to eq(true)
end
end
end
@@ -45,7 +46,7 @@
before do
login
visit manage_collections_path
-
+
short_name = 'SPL4SMAU'
fill_in 'keyword', with: short_name
click_on 'Search Collections'
@@ -76,16 +77,17 @@
before do
@file = "#{Rails.root}/#{@concept_id}.iso-smap"
click_on 'ISO 19115 (SMAP) (Native)'
- end
- after do
# Seems to need a brief (>0.01) pause to actually find the file.
sleep(0.1)
+ end
+
+ after do
FileUtils.rm @file if File.exist?(@file)
end
it 'downloads the file' do
- expect(File.exist?(@file))
+ expect(File.exist?(@file)).to eq(true)
end
end
end
diff --git a/spec/features/services/download_json_spec.rb b/spec/features/services/download_json_spec.rb
index 7f9f8bb42..52299efd0 100644
--- a/spec/features/services/download_json_spec.rb
+++ b/spec/features/services/download_json_spec.rb
@@ -13,5 +13,23 @@
it 'renders the download link' do
expect(page).to have_link('Download JSON', href: download_json_service_path(@ingest_response['concept-id']))
end
+
+ context 'when downloading the json' do
+ before do
+ @file = "#{Rails.root}/#{@ingest_response['concept-id']}.json"
+ click_on 'Download JSON'
+
+ # Seems to need a brief (>0.01) pause to actually find the file.
+ sleep(0.1)
+ end
+
+ after do
+ FileUtils.rm @file if File.exist?(@file)
+ end
+
+ it 'downloads the file' do
+ expect(File.exist?(@file)).to eq(true)
+ end
+ end
end
end
diff --git a/spec/features/tools/download_json_spec.rb b/spec/features/tools/download_json_spec.rb
new file mode 100644
index 000000000..b52ee6006
--- /dev/null
+++ b/spec/features/tools/download_json_spec.rb
@@ -0,0 +1,42 @@
+describe 'Downloading Tool JSON', js: true do
+ before :all do
+ @ingest_response, _concept_response, @native_id = publish_tool_draft
+ end
+
+ # TODO: remove after CMR-6332
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id, 'token')
+
+ raise unless delete_response.success?
+ end
+
+ context 'when viewing the tool preview page' do
+ before do
+ login
+
+ visit tool_path(@ingest_response['concept-id'])
+ end
+
+ it 'renders the download link' do
+ expect(page).to have_link('Download JSON', href: download_json_tool_path(@ingest_response['concept-id']))
+ end
+
+ context 'when downloading the json' do
+ before do
+ @file = "#{Rails.root}/#{@ingest_response['concept-id']}.json"
+ click_on 'Download JSON'
+
+ # Seems to need a brief (>0.01) pause to actually find the file.
+ sleep(0.1)
+ end
+
+ after do
+ FileUtils.rm @file if File.exist?(@file)
+ end
+
+ it 'downloads the file' do
+ expect(File.exist?(@file)).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/features/variables/download_json_spec.rb b/spec/features/variables/download_json_spec.rb
index 6ac90d28d..739d26e62 100644
--- a/spec/features/variables/download_json_spec.rb
+++ b/spec/features/variables/download_json_spec.rb
@@ -13,5 +13,23 @@
it 'renders the download link' do
expect(page).to have_link('Download JSON', href: download_json_variable_path(@ingest_response['concept-id']))
end
+
+ context 'when downloading the json' do
+ before do
+ @file = "#{Rails.root}/#{@ingest_response['concept-id']}.json"
+ click_on 'Download JSON'
+
+ # Seems to need a brief (>0.01) pause to actually find the file.
+ sleep(0.1)
+ end
+
+ after do
+ FileUtils.rm @file if File.exist?(@file)
+ end
+
+ it 'downloads the file' do
+ expect(File.exist?(@file)).to eq(true)
+ end
+ end
end
end
From 4fa3f5cfae2125dd3e99ffbba985747c5721c4d6 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Mon, 13 Jul 2020 11:11:06 -0400
Subject: [PATCH 10/49] Hotfix/MMT-2231-1 Updating revisions helpers to sort by
revision date (#615)
* MMT-2231 updated revisions testing
* MMT-2231 updating collection sorting and tests
---
app/concerns/cmr_collections_helper.rb | 2 +-
app/controllers/manage_metadata_controller.rb | 6 ++---
.../collections/revision_list_spec.rb | 18 +++++++------
spec/features/services/revision_list_spec.rb | 25 +++++++++++++------
spec/features/tools/revision_list_spec.rb | 16 +++++++++---
5 files changed, 45 insertions(+), 22 deletions(-)
diff --git a/app/concerns/cmr_collections_helper.rb b/app/concerns/cmr_collections_helper.rb
index f7ea2ad96..4e8502eec 100644
--- a/app/concerns/cmr_collections_helper.rb
+++ b/app/concerns/cmr_collections_helper.rb
@@ -6,7 +6,7 @@ def get_revisions(concept_id, revision_id)
# try again because CMR might be a little slow to index if it is a newly published revision
attempts = 0
while attempts < 20
- revisions_response = cmr_client.get_collections({ concept_id: concept_id, all_revisions: true, include_granule_counts: true }, token)
+ revisions_response = cmr_client.get_collections({ concept_id: concept_id, all_revisions: true, include_granule_counts: true, sort_key: '-revision_date' }, token)
revisions = if revisions_response.success?
revisions_response.body.fetch('items', [])
else
diff --git a/app/controllers/manage_metadata_controller.rb b/app/controllers/manage_metadata_controller.rb
index a7d98f700..cdb465b8a 100644
--- a/app/controllers/manage_metadata_controller.rb
+++ b/app/controllers/manage_metadata_controller.rb
@@ -110,7 +110,7 @@ def set_variable_information
# if the variable is not found, try again because CMR might be a little slow to index if it is a newly published record
attempts = 0
while attempts < 20
- variables_search_response = cmr_client.get_variables(concept_id: @concept_id, all_revisions: true)
+ variables_search_response = cmr_client.get_variables(concept_id: @concept_id, all_revisions: true, sort_key: '-revision_date')
variable_data = if variables_search_response.success?
variables_search_response.body.fetch('items', [])
@@ -163,7 +163,7 @@ def set_service_information
# if the service is not found, try again because CMR might be a little slow to index if it is a newly published record
attempts = 0
while attempts < 20
- services_search_response = cmr_client.get_services(concept_id: @concept_id, all_revisions: true)
+ services_search_response = cmr_client.get_services(concept_id: @concept_id, all_revisions: true, sort_key: '-revision_date')
service_data = if services_search_response.success?
services_search_response.body.fetch('items', [])
@@ -218,7 +218,7 @@ def set_tool_information
# if the tool is not found, try again because CMR might be a little slow to index if it is a newly published record
attempts = 0
while attempts < 20
- tools_search_response = cmr_client.get_tools(concept_id: @concept_id, all_revisions: true)
+ tools_search_response = cmr_client.get_tools(concept_id: @concept_id, all_revisions: true, sort_key: '-revision_date')
tool_data = if tools_search_response.success?
tools_search_response.body.fetch('items', [])
diff --git a/spec/features/collections/revision_list_spec.rb b/spec/features/collections/revision_list_spec.rb
index 9d3c57bb9..60ba78a42 100644
--- a/spec/features/collections/revision_list_spec.rb
+++ b/spec/features/collections/revision_list_spec.rb
@@ -1,15 +1,19 @@
describe 'Revision list', js: true do
context 'when viewing a published collection' do
+ before :all do
+ native_id = 'collection_revision_native_id'
+ _ingest_response, _concept_response = publish_collection_draft(native_id: native_id, revision_count: 10, short_name: 'b_test_01')
+ @ingest_response, @concept_response = publish_collection_draft(native_id: native_id, short_name: 'c_test_01')
+ end
+
before do
login
- ingest_response, @concept_response = publish_collection_draft(revision_count: 2)
-
- visit collection_path(ingest_response['concept-id'])
+ visit collection_path(@ingest_response['concept-id'])
end
it 'displays the number of revisions' do
- expect(page).to have_content('Revisions (2)')
+ expect(page).to have_content('Revisions (10)')
end
context 'when clicking on the revision link' do
@@ -27,15 +31,15 @@
end
it 'displays when the revision was made' do
- expect(page).to have_content(today_string, count: 2)
+ expect(page).to have_content(today_string, count: 10)
end
it 'displays what user made the revision' do
- expect(page).to have_content('typical', count: 2)
+ expect(page).to have_content('typical', count: 10)
end
it 'displays the correct phrasing for reverting records' do
- expect(page).to have_content('Revert to this Revision', count: 1)
+ expect(page).to have_content('Revert to this Revision', count: 9)
end
context 'when viewing an old revision' do
diff --git a/spec/features/services/revision_list_spec.rb b/spec/features/services/revision_list_spec.rb
index 6fbe57f1d..b560aad7f 100644
--- a/spec/features/services/revision_list_spec.rb
+++ b/spec/features/services/revision_list_spec.rb
@@ -1,15 +1,22 @@
describe 'Service revision list', reset_provider: true, js: true do
context 'when viewing a published service' do
+ before :all do
+      # CMR does not return revisions sorted by revision_id; it sorts
+      # by name first (and possibly other fields). If the sort_key is working
+      # correctly, the last revision (c_test_01) should be visible on the page.
+ native_id = 'service_revision_native_id'
+ _ingest_response, _concept_response = publish_service_draft(native_id: native_id, revision_count: 10, name: 'b_test_01')
+ @ingest_response, @concept_response = publish_service_draft(native_id: native_id, name: 'c_test_01')
+ end
+
before do
login
- ingest_response, @concept_response = publish_service_draft(revision_count: 2)
-
- visit service_path(ingest_response['concept-id'])
+ visit service_path(@ingest_response['concept-id'])
end
it 'displays the number of revisions' do
- expect(page).to have_content('Revisions (2)')
+ expect(page).to have_content('Revisions (10)')
end
context 'when clicking on the revision link' do
@@ -27,15 +34,19 @@
end
it 'displays when the revision was made' do
- expect(page).to have_content(today_string, count: 2)
+ expect(page).to have_content(today_string, count: 10)
end
it 'displays what user made the revision' do
- expect(page).to have_content('typical', count: 2)
+ expect(page).to have_content('typical', count: 10)
+ end
+
+ it 'displays the most recent revisions' do
+ expect(page).to have_content('11 - Published')
end
it 'displays the correct phrasing for reverting records' do
- expect(page).to have_content('Revert to this Revision', count: 1)
+ expect(page).to have_content('Revert to this Revision', count: 9)
end
context 'when viewing an old revision' do
diff --git a/spec/features/tools/revision_list_spec.rb b/spec/features/tools/revision_list_spec.rb
index b33ba78f1..0ef91f2a9 100644
--- a/spec/features/tools/revision_list_spec.rb
+++ b/spec/features/tools/revision_list_spec.rb
@@ -1,7 +1,11 @@
describe 'Tool revision list', reset_provider: true, js: true do
context 'when viewing a published tool' do
before :all do
- @ingest_response, @concept_response, @native_id = publish_tool_draft(revision_count: 2)
+      # CMR does not return revisions sorted by revision_id; it sorts
+      # by name first (and possibly other fields). If the sort_key is working
+      # correctly, the last revision (c_test_01) should be visible on the page.
+ _ingest_response, _concept_response, @native_id = publish_tool_draft(revision_count: 10, name: 'b_test_01')
+ @ingest_response, @concept_response, _native_id = publish_tool_draft(native_id: @native_id, name: 'c_test_01')
end
# TODO: remove after CMR-6332
@@ -18,7 +22,7 @@
end
it 'displays the number of revisions' do
- expect(page).to have_content('Revisions (2)')
+ expect(page).to have_content('Revisions (10)')
end
context 'when clicking on the revision link' do
@@ -36,11 +40,15 @@
end
it 'displays when the revision was made' do
- expect(page).to have_content(today_string, count: 2)
+ expect(page).to have_content(today_string, count: 10)
end
it 'displays what user made the revision' do
- expect(page).to have_content('typical', count: 2)
+ expect(page).to have_content('typical', count: 10)
+ end
+
+ it 'displays the most recent revisions' do
+ expect(page).to have_content('11 - Published')
end
# TODO: Uncomment in MMT-2233
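The setup above publishes two drafts under one native_id with names chosen to sort against each other: if CMR were still returning revisions sorted by name, 'b_test_01' would surface first. A hedged sketch of that arrangement (helper names are from the spec; the native_id value is illustrative):

# Same native_id, names that sort b_test_01 < c_test_01 alphabetically.
# With sort_key: '-revision_date' the later c_test_01 publish must be the
# first revision listed, regardless of name order.
native_id = 'tool_revision_native_id' # illustrative value
publish_tool_draft(revision_count: 10, native_id: native_id, name: 'b_test_01')
publish_tool_draft(native_id: native_id, name: 'c_test_01')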
From 57b82a3537348a96065fda65039b1c7fcbc53e92 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 13 Jul 2020 11:30:07 -0400
Subject: [PATCH 11/49] MMT-2313: added nokogiri/diff to gemfile, added hidden
url endpoint for loss report
---
.gitignore | 3 +
Gemfile | 2 +
Gemfile.lock | 5 +
app/controllers/collections_controller.rb | 2 +-
app/helpers/loss_report_helper.rb | 241 ++++++++++++++++++
.../collection_drafts/loss_report.html.erb | 23 ++
6 files changed, 275 insertions(+), 1 deletion(-)
create mode 100644 app/helpers/loss_report_helper.rb
create mode 100644 app/views/collection_drafts/loss_report.html.erb
diff --git a/.gitignore b/.gitignore
index 79593f013..25b8f5332 100644
--- a/.gitignore
+++ b/.gitignore
@@ -57,3 +57,6 @@ nohup.out
#backup files
*~
+
+#ignore package
+package.json
diff --git a/Gemfile b/Gemfile
index f7b917df1..4198d27fc 100644
--- a/Gemfile
+++ b/Gemfile
@@ -73,6 +73,8 @@ gem 'aasm'
gem 'browser'
+gem 'nokogiri-diff', '~> 0.2.0' # for comparing xml documents
+
# collections metadata preview
# run this command to work from a local copy of the gem's repo
# bundle config local.cmr_metadata_preview /path/to/local/git/repository
diff --git a/Gemfile.lock b/Gemfile.lock
index 61ce28ff4..493f42aa4 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -195,6 +195,9 @@ GEM
nio4r (2.5.2)
nokogiri (1.10.9)
mini_portile2 (~> 2.4.0)
+ nokogiri-diff (0.2.0)
+ nokogiri (~> 1.5)
+ tdiff (~> 0.3, >= 0.3.2)
parallel (1.19.1)
parser (2.7.1.2)
ast (~> 2.4.0)
@@ -297,6 +300,7 @@ GEM
activesupport (>= 4.0)
sprockets (>= 3.0.0)
sqlite3 (1.4.2)
+ tdiff (0.3.4)
thor (1.0.1)
thread_safe (0.3.6)
tilt (2.0.10)
@@ -365,6 +369,7 @@ DEPENDENCIES
mini_racer
momentjs-rails
multi_xml
+ nokogiri-diff (~> 0.2.0)
pg
pundit
rack_session_access
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 6df58c4b6..4da86ea50 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -24,7 +24,7 @@ def show
end
def loss_report
- render 'errors/internal_server_error'
+ render 'collection_drafts/loss_report'
end
def edit
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
new file mode 100644
index 000000000..604f6a383
--- /dev/null
+++ b/app/helpers/loss_report_helper.rb
@@ -0,0 +1,241 @@
+module LossReportHelper
+
+ def prepare_collections(concept_id, format, umm_c_version)
+ # TODO: need to add exception handling for get_concept, translate_collection
+ original_collection_xml = cmr_client.get_concept(concept_id,token, {})
+ original_collection_hash = Hash.from_xml(original_collection_xml.body)
+ translated_collection_umm = cmr_client.translate_collection(original_collection_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
+ translated_collection_xml = cmr_client.translate_collection(translated_collection_umm.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
+ translated_collection_hash = Hash.from_xml(translated_collection_xml.body)
+ return original_collection_xml.body, translated_collection_xml.body
+ end
+
+ def path_leads_to_list?(path, org_hash, conv_hash)
+ org_hash_path = hash_navigation(path, org_hash)
+ conv_hash_path = hash_navigation(path, conv_hash)
+
+ if org_hash_path == 'flag' || conv_hash_path == 'flag'
+ return false
+ end
+
+ if path.include?("[") && path.include?("]")
+ bool = true
+ elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
+ org_hash_path.keys.each { |key| bool = true; break if org_hash_path[key].is_a?(Array) }
+ conv_hash_path.keys.each { |key| bool = true; break if conv_hash_path[key].is_a?(Array) }
+ elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
+ bool = true
+ else
+ bool = false
+ end
+ bool
+ end
+
+ def hash_navigation(dir, hash)
+ dir = dir.split("/")
+ if dir.is_a? Array
+ dir.each do |key|
+ if !key.empty? && hash.is_a?(Hash)
+ hash = hash[key]
+ elsif hash.is_a? Array
+ return 'flag'
+ end
+ end
+ else
+ hash = hash[dir]
+ end
+ hash
+ end
+
+ def get_list_paths(dif_hash, original, converted)
+ values_list = hash_to_list_of_values(dif_hash)
+ paths = Array.new
+
+ for item in values_list
+ org_path = get_dir(item, original)
+ conv_path = get_dir(item, converted)
+
+ if org_path.include? "[]"
+ path = org_path
+ elsif conv_path.include? "[]"
+ path = conv_path
+ else
+ path = org_path #arbitrary
+ end
+
+ if path.include? "[]"
+ path = path.split "[]"
+ paths << path[0] unless paths.any? { |p| p.eql? path[0] }
+ elsif path_leads_to_list?(path, original, converted)
+ paths << path unless paths.any? { |p| p.eql? path }
+ end
+ end
+ paths
+ end
+
+ def compare_arrays(diff_hash, original_hash, converted_hash)
+ dif_hash = diff_hash.clone
+ original = original_hash.clone
+ converted = converted_hash.clone
+ paths = get_list_paths(dif_hash, original, converted)
+
+ paths.each do |path|
+ org_array = hash_navigation(path, original)
+ org_arr = org_array.clone
+ conv_array = hash_navigation(path, converted)
+ conv_arr = conv_array.clone
+
+ org_arr = Array.wrap(org_arr) unless org_arr.is_a?(Array)
+ org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
+ conv_arr = Array.wrap(conv_arr) unless conv_arr.is_a?(Array)
+ conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+
+ for conv_item in conv_array
+ for org_item in org_array
+ if org_item.eql? conv_item
+ org_arr.delete(org_item)
+ break
+ end
+ end
+ end
+
+ for org_item in org_array
+ for conv_item in conv_array
+ if org_item.eql? conv_item
+ conv_arr.delete(conv_item)
+ break
+ end
+ end
+ end
+
+ org_arr.each do |item|
+ path_with_index = path + "[#{org_array.index(item)}]"
+ puts "-: ".ljust(60) + path_with_index
+ end
+
+ conv_arr.each do |item|
+ path_with_index = path + "[#{conv_array.index(item)}]"
+ puts "+: ".ljust(60) + path_with_index #THIS INDEX DOESN'T MAKE SENSE
+ end
+ end
+ end
+
+ def find_difference_bt_hash_arrays(org_arr, conv_arr)
+ org = org_arr.clone
+ conv = conv_arr.clone
+ missing = Array.new
+ if org.eql? conv
+ return missing
+ else
+ for conv_item in conv
+ for org_item in org
+ if org_item.eql? conv_item
+ org.delete(conv_item)
+ break
+ end
+ end
+ end
+ missing += org
+ end
+ missing
+ end
+
+ def find_difference_bt_hashes(org, conv)
+ missing = Hash.new
+ if org.eql? conv
+ return missing
+ else
+ org.each do |org_key,org_value|
+ conv_value = conv[org_key]
+ if conv.key? org_key
+ if conv_value.eql? org_value
+ next
+ elsif org_value.is_a?(Hash) && conv_value.is_a?(Hash)
+ missing_value = find_difference_bt_hashes(org_value, conv_value)
+ unless missing_value.empty?
+ missing[org_key] = missing_value
+ end
+ elsif org_value.is_a?(Array) && conv_value.is_a?(Array)
+ missing_value = find_difference_bt_hash_arrays(org_value, conv_value)
+ unless missing_value.empty?
+ missing[org_key] = missing_value
+ end
+ else
+ missing[org_key] = org_value
+ end
+ else
+ missing[org_key] = org_value
+ end
+ end
+ end
+ missing
+ end
+
+ def get_dir(value, hash_or_arr)
+ iterable = hash_or_arr.clone
+ dir = String.new
+ if iterable.is_a? Hash
+ unless iterable.key(value).nil?
+ matching_key = iterable.key(value)
+ dir += '/' + matching_key
+ iterable.delete(matching_key)
+ return dir
+ else
+ iterable.each do |key,val|
+ if val.is_a?(Hash) && hash_to_list_of_values(val).include?(value)
+ dir += '/' + key
+ dir += get_dir(value, val)
+ return dir
+ elsif val.is_a?(Array) && array_to_list_of_values(val).include?(value)
+ dir += '/' + key + "[]"
+ dir += get_dir(value, val)
+ return dir
+ elsif val.eql? value
+ dir += '/' + key
+ iterable.delete(key)
+ return dir
+ end
+ end
+ end
+ elsif iterable.is_a? Array
+ iterable.each do |item|
+ if item.is_a?(Hash) && hash_to_list_of_values(item).include?(value)
+ dir += get_dir(value,item)
+ return dir
+ elsif item.is_a?(Array) && array_to_list_of_values(item).include?(value)
+ dir += get_dir(value,item) + "[]"
+ return dir
+ end
+ end
+ end
+ dir
+ end
+
+ def hash_to_list_of_values(hash)
+ list = Array.new
+ for val in hash.values
+ if val.is_a? Hash
+ list += hash_to_list_of_values(val)
+ elsif val.is_a? Array
+ list += array_to_list_of_values(val)
+ else
+ list << val
+ end
+ end
+ list
+ end
+
+ def array_to_list_of_values(array)
+ ls = Array.new
+ for item in array
+ if item.is_a? Hash
+ ls += hash_to_list_of_values(item)
+ elsif item.is_a? Array
+ ls += array_to_list_of_values(item)
+ else
+ ls << item
+ end
+ end
+ ls
+ end
+end
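To make the helper's path conventions concrete, a small worked example of get_dir and hash_navigation (the metadata hash is invented and the helper is assumed to be mixed in; '[]' is the tag get_dir appends when a hop passes through an array):

# Invented toy hash for illustration only.
metadata = { 'Collection' => { 'Contacts' => { 'Contact' => [{ 'Role' => 'POC' }] } } }

get_dir('POC', metadata)
# => "/Collection/Contacts/Contact[]/Role"

hash_navigation('/Collection/Contacts', metadata)
# => { 'Contact' => [{ 'Role' => 'POC' }] }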
diff --git a/app/views/collection_drafts/loss_report.html.erb b/app/views/collection_drafts/loss_report.html.erb
new file mode 100644
index 000000000..d659e036f
--- /dev/null
+++ b/app/views/collection_drafts/loss_report.html.erb
@@ -0,0 +1,23 @@
+<table>
+  <thead>
+    <tr>
+    <th><%= 'Alteration' %></th>
+    <th><%= 'Path' %></th>
+    </tr>
+  </thead>
+  <tbody>
+  <% orig,conv = prepare_collections('C1200000085-NSIDC_ECS', 'echo10', '1.15.3') %>
+  <% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
+  <% conv = Nokogiri::XML(conv) { |config| config.strict.noblanks } %>
+  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
+    <tr>
+      <td>
+        <%= change %>
+      </td>
+      <td>
+        <%= node.parent.path %>
+      </td>
+    </tr>
+  <% end %>
+  </tbody>
+</table>
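The view above leans on the nokogiri-diff gem added to the Gemfile in this commit; a self-contained sketch of the API it relies on:

require 'nokogiri/diff' # provided by the nokogiri-diff gem

old_doc = Nokogiri::XML('<Collection><ShortName>A</ShortName></Collection>')
new_doc = Nokogiri::XML('<Collection><ShortName>B</ShortName></Collection>')

old_doc.diff(new_doc, added: true, removed: true) do |change, node|
  # change is '+' or '-'; node.parent.path is the XPath shown in the report
  puts "#{change} #{node.to_xml} (#{node.parent.path})"
end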
From 49c7dd962110e24a6fcd1f3a3ece3260ef4137d0 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 13 Jul 2020 13:14:08 -0400
Subject: [PATCH 12/49] MMT-2313: added loss reporting for arrays
---
app/helpers/loss_report_helper.rb | 13 ++++++++++---
.../collection_drafts/loss_report.html.erb | 17 ++++++++++++++---
2 files changed, 24 insertions(+), 6 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 604f6a383..fc006c1a3 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -7,7 +7,7 @@ def prepare_collections(concept_id, format, umm_c_version)
translated_collection_umm = cmr_client.translate_collection(original_collection_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
translated_collection_xml = cmr_client.translate_collection(translated_collection_umm.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
translated_collection_hash = Hash.from_xml(translated_collection_xml.body)
- return original_collection_xml.body, translated_collection_xml.body
+ return original_collection_xml.body, translated_collection_xml.body, original_collection_hash, translated_collection_hash
end
def path_leads_to_list?(path, org_hash, conv_hash)
@@ -73,12 +73,14 @@ def get_list_paths(dif_hash, original, converted)
paths
end
- def compare_arrays(diff_hash, original_hash, converted_hash)
- dif_hash = diff_hash.clone
+ def compare_arrays(original_hash, converted_hash, dh=false)
+ dh ? dif_hash = dh.clone : dif_hash = find_difference_bt_hashes(original_hash, converted_hash).clone
original = original_hash.clone
converted = converted_hash.clone
paths = get_list_paths(dif_hash, original, converted)
+ output = Array.new
+
paths.each do |path|
org_array = hash_navigation(path, original)
org_arr = org_array.clone
@@ -111,13 +113,18 @@ def compare_arrays(diff_hash, original_hash, converted_hash)
org_arr.each do |item|
path_with_index = path + "[#{org_array.index(item)}]"
puts "-: ".ljust(60) + path_with_index
+ loss_item = ['-', path_with_index]
+ output << loss_item
end
conv_arr.each do |item|
path_with_index = path + "[#{conv_array.index(item)}]"
puts "+: ".ljust(60) + path_with_index #THIS INDEX DOESN'T MAKE SENSE
+ loss_item = ['+', path_with_index]
+ output << loss_item
end
end
+ output
end
def find_difference_bt_hash_arrays(org_arr, conv_arr)
diff --git a/app/views/collection_drafts/loss_report.html.erb b/app/views/collection_drafts/loss_report.html.erb
index d659e036f..702b54024 100644
--- a/app/views/collection_drafts/loss_report.html.erb
+++ b/app/views/collection_drafts/loss_report.html.erb
@@ -1,15 +1,16 @@
-    <th><%= 'Alteration' %></th>
-    <th><%= 'Path' %></th>
+      <th><%= 'Alteration' %></th>
+      <th><%= 'Path' %></th>
- <% orig,conv = prepare_collections('C1200000085-NSIDC_ECS', 'echo10', '1.15.3') %>
+ <% orig,conv,orig_h,conv_h = prepare_collections('C1200000085-NSIDC_ECS', 'echo10', '1.15.3') %>
<% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
<% conv = Nokogiri::XML(conv) { |config| config.strict.noblanks } %>
<% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
+ <% next if path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
    <tr>
      <td>
        <%= change %>
@@ -19,5 +20,15 @@
      </td>
    </tr>
  <% end %>
+  <% compare_arrays(orig_h, conv_h).each do |item| %>
+    <tr>
+      <td>
+        <%= item[0] %>
+      </td>
+      <td>
+        <%= item[1] %>
+      </td>
+    </tr>
+  <% end %>
  </tbody>
</table>
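With this change compare_arrays also returns the loss items it prints, as [change, path] pairs. A hedged example of the shape (input hashes invented; the helper is assumed to be mixed in):

# Invented input: one Email survives the round-trip, one is lost.
original  = { 'Emails' => { 'Email' => ['a@x.gov', 'b@x.gov'] } }
converted = { 'Emails' => { 'Email' => ['a@x.gov'] } }

compare_arrays(original, converted).each do |change, path|
  puts "#{change} #{path}" # prints "- /Emails/Email[1]"
end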
From 67676fcfcbc7914241a5c8a0747896ac654dab46 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 13 Jul 2020 14:14:07 -0400
Subject: [PATCH 13/49] MMT-2313: Added comment to collections_controller
---
app/controllers/collections_controller.rb | 10 ++++++----
.../loss_report.html.erb | 0
2 files changed, 6 insertions(+), 4 deletions(-)
rename app/views/{collection_drafts => collections}/loss_report.html.erb (100%)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 4da86ea50..da1c79d88 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -23,10 +23,6 @@ def show
end
end
- def loss_report
- render 'collection_drafts/loss_report'
- end
-
def edit
draft = CollectionDraft.create_from_collection(@collection, current_user, @native_id)
Rails.logger.info("Audit Log: Collection Draft for #{draft.entry_title} was created by #{current_user.urs_uid} in provider #{current_user.provider_id}")
@@ -119,6 +115,12 @@ def create_update_proposal
redirect_to collection_draft_proposal_path(proposal)
end
+ def loss_report
+ # When a user wants to use MMT to edit metadata that currently exists in a non-UMM form,
+    # it's important that they're able to see if any data loss occurs in the translation to UMM.
+ # This method is needed to reference the appropriate helper and view for the lossiness report
+ end
+
private
def ensure_correct_collection_provider
diff --git a/app/views/collection_drafts/loss_report.html.erb b/app/views/collections/loss_report.html.erb
similarity index 100%
rename from app/views/collection_drafts/loss_report.html.erb
rename to app/views/collections/loss_report.html.erb
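For orientation: the action body above is intentionally empty of rendering logic, since Rails renders the same-named template implicitly, and after the rename that template is app/views/collections/loss_report.html.erb. The route this pairs with (shape as it exists at this point in the series):

# config/routes.rb
get 'collections/:id/loss' => 'collections#loss_report'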
From 9eec788ef4d036fad022c5773a6b8440d470b9b6 Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Mon, 13 Jul 2020 21:17:17 -0400
Subject: [PATCH 14/49] MMT-2229 Modifying delete_tool_spec due to comments and
 fixing tool_permissions_spec because of CMR-6332
---
spec/features/tools/delete_tool_spec.rb | 3 +--
spec/features/tools/tool_permissions_spec.rb | 16 +++++++++++++---
2 files changed, 14 insertions(+), 5 deletions(-)
diff --git a/spec/features/tools/delete_tool_spec.rb b/spec/features/tools/delete_tool_spec.rb
index 50c152752..d81a0bf79 100644
--- a/spec/features/tools/delete_tool_spec.rb
+++ b/spec/features/tools/delete_tool_spec.rb
@@ -1,5 +1,3 @@
-require 'rails_helper'
-
describe 'Delete tool', reset_provider: true, js: true do
before :all do
@ingested_tool, _concept_response, _native_id_1 = publish_tool_draft
@@ -7,6 +5,7 @@
@ingested_tool_for_delete_messages, _concept_response, @native_id_2 = publish_tool_draft
end
+ # Remove this section after CMR-6332 is resolved
after :all do
delete_response = cmr_client.delete_tool('MMT_2', @native_id_2, 'token')
# First tool should be deleted in the delete test
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index dde06078b..c68e986f6 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -93,6 +93,10 @@
it 'displays a modal informing the user they need to switch providers' do
expect(page).to have_content("Deleting this tool #{modal_text}")
+
+ # Remove this section after CMR-6332 is resolved
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id_2, 'token')
+ raise unless delete_response.success?
end
context 'when clicking Yes' do
@@ -172,16 +176,22 @@
context 'when the tools provider is not in the users available providers' do
before do
- @ingested_not_available_provider_tool, _concept_response = publish_tool_draft(provider_id: 'SEDAC')
+ @ingested_not_available_provider_tool, _concept_response, @native_id_3 = publish_tool_draft(provider_id: 'SEDAC')
visit tool_path(@ingested_not_available_provider_tool['concept-id'])
end
+ # Remove this section after CMR-6332 is resolved
+ after do
+ delete_response = cmr_client.delete_tool('SEDAC', @native_id_3, 'token')
+
+ raise unless delete_response.success?
+ end
+
it 'does not display the action links' do
expect(page).to have_no_link('Edit Tool Record')
expect(page).to have_no_link('Clone Tool Record')
-# TODO: Uncomment in MMT-2229
-# expect(page).to have_no_link('Delete Tool Record')
+ expect(page).to have_no_link('Delete Tool Record')
end
context 'when trying to visit the action paths directly' do
From e1eedaac36b246dc398c4c2b5e7f62974a31316d Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Mon, 13 Jul 2020 21:23:55 -0400
Subject: [PATCH 15/49] MMT-2229 Removing unneeded delete
---
spec/features/tools/tool_permissions_spec.rb | 7 -------
1 file changed, 7 deletions(-)
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index c68e986f6..1d7d8bd80 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -181,13 +181,6 @@
visit tool_path(@ingested_not_available_provider_tool['concept-id'])
end
- # Remove this section after CMR-6332 is resolved
- after do
- delete_response = cmr_client.delete_tool('SEDAC', @native_id_3, 'token')
-
- raise unless delete_response.success?
- end
-
it 'does not display the action links' do
expect(page).to have_no_link('Edit Tool Record')
expect(page).to have_no_link('Clone Tool Record')
From 55a52ae6a38c631defc2800a64c364d1046f2543 Mon Sep 17 00:00:00 2001
From: William Valencia
Date: Mon, 13 Jul 2020 21:25:09 -0400
Subject: [PATCH 16/49] MMT-2229 Removing unneeded native_id_3
---
spec/features/tools/tool_permissions_spec.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/spec/features/tools/tool_permissions_spec.rb b/spec/features/tools/tool_permissions_spec.rb
index 1d7d8bd80..1c617a181 100644
--- a/spec/features/tools/tool_permissions_spec.rb
+++ b/spec/features/tools/tool_permissions_spec.rb
@@ -176,7 +176,7 @@
context 'when the tools provider is not in the users available providers' do
before do
- @ingested_not_available_provider_tool, _concept_response, @native_id_3 = publish_tool_draft(provider_id: 'SEDAC')
+ @ingested_not_available_provider_tool, _concept_response = publish_tool_draft(provider_id: 'SEDAC')
visit tool_path(@ingested_not_available_provider_tool['concept-id'])
end
From d767d9ff1f6f6cf3ced88f199d2a7074a4806fe5 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 14 Jul 2020 10:35:59 -0400
Subject: [PATCH 17/49] MMT-2313: made changes as per MMT-2311-1 PR change
requests
---
.gitignore | 2 +-
app/helpers/loss_report_helper.rb | 63 +++---
lib/tasks/translate_collections.rake | 281 +++++++++++++++++++++++++++
3 files changed, 318 insertions(+), 28 deletions(-)
create mode 100644 lib/tasks/translate_collections.rake
diff --git a/.gitignore b/.gitignore
index 25b8f5332..03dd71f16 100644
--- a/.gitignore
+++ b/.gitignore
@@ -59,4 +59,4 @@ nohup.out
*~
#ignore package
-package.json
+package-lock.json
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index fc006c1a3..7992d2da1 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -2,21 +2,20 @@ module LossReportHelper
def prepare_collections(concept_id, format, umm_c_version)
# TODO: need to add exception handling for get_concept, translate_collection
- original_collection_xml = cmr_client.get_concept(concept_id,token, {})
- original_collection_hash = Hash.from_xml(original_collection_xml.body)
- translated_collection_umm = cmr_client.translate_collection(original_collection_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
- translated_collection_xml = cmr_client.translate_collection(translated_collection_umm.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
- translated_collection_hash = Hash.from_xml(translated_collection_xml.body)
- return original_collection_xml.body, translated_collection_xml.body, original_collection_hash, translated_collection_hash
+ original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
+ original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
+ translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
+ translated_collection_native_xml = cmr_client.translate_collection(translated_collection_umm_json.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
+ translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
+ return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash
end
def path_leads_to_list?(path, org_hash, conv_hash)
+ # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
org_hash_path = hash_navigation(path, org_hash)
conv_hash_path = hash_navigation(path, conv_hash)
- if org_hash_path == 'flag' || conv_hash_path == 'flag'
- return false
- end
+ return false if org_hash_path == false || conv_hash_path == false
if path.include?("[") && path.include?("]")
bool = true
@@ -32,13 +31,15 @@ def path_leads_to_list?(path, org_hash, conv_hash)
end
def hash_navigation(dir, hash)
+ # Passed a path string and the hash being navigated. This method parses the path string and
+ # returns the hash at the end of the path
dir = dir.split("/")
if dir.is_a? Array
dir.each do |key|
if !key.empty? && hash.is_a?(Hash)
hash = hash[key]
elsif hash.is_a? Array
- return 'flag'
+ return false
end
end
else
@@ -48,6 +49,8 @@ def hash_navigation(dir, hash)
end
def get_list_paths(dif_hash, original, converted)
+ # arguments: differences hash, the original hash, and converted hash
+    # Using these 3 hashes, all paths that lead to a list are returned as an array of path strings
values_list = hash_to_list_of_values(dif_hash)
paths = Array.new
@@ -60,9 +63,13 @@ def get_list_paths(dif_hash, original, converted)
elsif conv_path.include? "[]"
path = conv_path
else
- path = org_path #arbitrary
+ path = org_path
end
+ # the get_dir method includes a clause that 'tags' array-containing fields with '[]'
+ # eg. '/Collection/Contacts/Contact[]/OrganizationEmails/Email'
+ # the following lines show how this 'tagging' is used to identify an array in a given directory
+
if path.include? "[]"
path = path.split "[]"
paths << path[0] unless paths.any? { |p| p.eql? path[0] }
@@ -74,6 +81,11 @@ def get_list_paths(dif_hash, original, converted)
end
def compare_arrays(original_hash, converted_hash, dh=false)
+    # arguments: the original hash, the converted hash, and (optionally) a
+    # precomputed differences hash. each path that leads to an array is used
+    # to navigate to that array and subsequently compare the arrays in the
+    # original and converted hashes. prints each loss item and returns the
+    # collected [change, path] pairs
+
dh ? dif_hash = dh.clone : dif_hash = find_difference_bt_hashes(original_hash, converted_hash).clone
original = original_hash.clone
converted = converted_hash.clone
@@ -83,27 +95,17 @@ def compare_arrays(original_hash, converted_hash, dh=false)
paths.each do |path|
org_array = hash_navigation(path, original)
- org_arr = org_array.clone
conv_array = hash_navigation(path, converted)
- conv_arr = conv_array.clone
- org_arr = Array.wrap(org_arr) unless org_arr.is_a?(Array)
+ org_array.is_a?(Array) ? org_arr = Array.wrap(org_array) : org_arr = org_array.clone
org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_arr = Array.wrap(conv_arr) unless conv_arr.is_a?(Array)
+ conv_array.is_a?(Array) ? conv_arr = Array.wrap(conv_array) : conv_arr = conv_array.clone
conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
for conv_item in conv_array
for org_item in org_array
if org_item.eql? conv_item
org_arr.delete(org_item)
- break
- end
- end
- end
-
- for org_item in org_array
- for conv_item in conv_array
- if org_item.eql? conv_item
conv_arr.delete(conv_item)
break
end
@@ -128,6 +130,8 @@ def compare_arrays(original_hash, converted_hash, dh=false)
end
def find_difference_bt_hash_arrays(org_arr, conv_arr)
+ # array inputs; the output is an array that contains the items in the original array
+ # that were not found in the converted array
org = org_arr.clone
conv = conv_arr.clone
missing = Array.new
@@ -148,13 +152,15 @@ def find_difference_bt_hash_arrays(org_arr, conv_arr)
end
def find_difference_bt_hashes(org, conv)
+ # input is the original hash and the converted hash; the output is the
+ # 'differences hash' which represents the items in the original hash that were
+ # not found in the converted hash
missing = Hash.new
if org.eql? conv
return missing
else
org.each do |org_key,org_value|
- conv_value = conv[org_key]
- if conv.key? org_key
+ if (conv_value = conv[org_key])
if conv_value.eql? org_value
next
elsif org_value.is_a?(Hash) && conv_value.is_a?(Hash)
@@ -179,11 +185,12 @@ def find_difference_bt_hashes(org, conv)
end
def get_dir(value, hash_or_arr)
+ # passing the sought-after value and the hash or array being parsed
+ # output: a single string representing the path to the value arg passed to this method
iterable = hash_or_arr.clone
dir = String.new
if iterable.is_a? Hash
- unless iterable.key(value).nil?
- matching_key = iterable.key(value)
+ if (matching_key = iterable.key(value))
dir += '/' + matching_key
iterable.delete(matching_key)
return dir
@@ -219,6 +226,7 @@ def get_dir(value, hash_or_arr)
end
def hash_to_list_of_values(hash)
+ # converts a highly nested hash to a list of all its values
list = Array.new
for val in hash.values
if val.is_a? Hash
@@ -233,6 +241,7 @@ def hash_to_list_of_values(hash)
end
def array_to_list_of_values(array)
+ #converts a highly nested array to a list of all its values
ls = Array.new
for item in array
if item.is_a? Hash
diff --git a/lib/tasks/translate_collections.rake b/lib/tasks/translate_collections.rake
new file mode 100644
index 000000000..ec0ae90e8
--- /dev/null
+++ b/lib/tasks/translate_collections.rake
@@ -0,0 +1,281 @@
+require 'libxml_to_hash'
+
+namespace :collection do
+ desc 'Translate a collection from native format to UMM JSON and back to native format'
+ task :translate, [:file, :format, :disp, :version] => :environment do |_task, args|
+ args.with_defaults(:version => '1.15.3')
+ args.with_defaults(:disp => 'show')
+
+    abort 'FORMAT INVALID' unless %w[echo10 dif10 iso19115].include?(args.format)
+
+ filename = args.file.split('/')[-1]
+ puts "\nTranslating #{filename} to UMM JSON..."
+
+ native_original_xml = File.read(args.file)
+ native_original_hash = Hash.from_xml(native_original_xml)
+
+ #translate to UMM
+ umm_response = cmr_client.translate_collection(native_original_xml, "application/#{args.format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{args.version}", skip_validation=true )
+ umm_json = umm_response.body.to_json
+ umm_response.success? ? puts("\nsuccessful translation to UMM") : abort("\nUMM translation failure")
+
+ # translate back to native
+ back_to_native = cmr_client.translate_collection(umm_json, "application/vnd.nasa.cmr.umm+json;version=#{args.version}", "application/#{args.format}+xml", skip_validation=true )
+ native_converted_hash = Hash.from_xml(back_to_native.body)
+ native_converted_xml = back_to_native.body
+ back_to_native.success? ? puts("successful translation to native format \n\n") : abort("Native format translation failure \n\n")
+
+ # nokogiri output
+ nokogiri_original = Nokogiri::XML(native_original_xml) { |config| config.strict.noblanks }
+ nokogiri_converted = Nokogiri::XML(native_converted_xml) { |config| config.strict.noblanks }
+
+ nokogiri_original.diff(nokogiri_converted, {:added => true, :removed => true}) do |change,node|
+ next if path_leads_to_list?(node.parent.path, native_original_hash, native_converted_hash)
+ puts("#{change}: #{node.to_xml}".ljust(60) + node.parent.path) if args.disp.eql? 'show'
+      puts("#{change}: " + node.parent.path) if args.disp.eql? 'hide'
+ end
+
+ # find differences
+ dif_hash = find_difference_bt_hashes(native_original_hash, native_converted_hash)
+ compare_arrays(dif_hash, native_original_hash, native_converted_hash)
+
+ end
+
+ def path_leads_to_list?(path, org_hash, conv_hash)
+ # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
+ org_hash_path = hash_navigation(path, org_hash)
+ conv_hash_path = hash_navigation(path, conv_hash)
+
+ return false if org_hash_path == false || conv_hash_path == false
+
+ if path.include?("[") && path.include?("]")
+ bool = true
+ elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
+ org_hash_path.keys.each { |key| bool = true; break if org_hash_path[key].is_a?(Array) }
+ conv_hash_path.keys.each { |key| bool = true; break if conv_hash_path[key].is_a?(Array) }
+ elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
+ bool = true
+ else
+ bool = false
+ end
+ bool
+ end
+
+ def hash_navigation(dir, hash)
+ # Passed a path string and the hash being navigated. This method parses the path string and
+ # returns the hash at the end of the path
+ dir = dir.split("/")
+ if dir.is_a? Array
+ dir.each do |key|
+ if !key.empty? && hash.is_a?(Hash)
+ hash = hash[key]
+ elsif hash.is_a? Array
+ return false
+ end
+ end
+ else
+ hash = hash[dir]
+ end
+ hash
+ end
+
+ def get_list_paths(dif_hash, original, converted)
+ # arguments: differences hash, the original hash, and converted hash
+    # Using these 3 hashes, all paths that lead to a list are returned as an array of path strings
+ values_list = hash_to_list_of_values(dif_hash)
+ paths = Array.new
+
+ for item in values_list
+ org_path = get_dir(item, original)
+ conv_path = get_dir(item, converted)
+
+ if org_path.include? "[]"
+ path = org_path
+ elsif conv_path.include? "[]"
+ path = conv_path
+ else
+ path = org_path
+ end
+
+ # the get_dir method includes a clause that 'tags' array-containing fields with '[]'
+ # eg. '/Collection/Contacts/Contact[]/OrganizationEmails/Email'
+ # the following lines show how this 'tagging' is used to identify an array in a given directory
+
+ if path.include? "[]"
+ path = path.split "[]"
+ paths << path[0] unless paths.any? { |p| p.eql? path[0] }
+ elsif path_leads_to_list?(path, original, converted)
+ paths << path unless paths.any? { |p| p.eql? path }
+ end
+ end
+ paths
+ end
+
+ def compare_arrays(dif_hash, original, converted)
+ # arguments: differences hash, the original hash, and converted hash
+ # each path that leads to an array is used to navigate to that array and
+ # subsequently compare the arrays in the original and converted hashes.
+    # there is no usable output; differences are printed to the terminal
+ paths = get_list_paths(dif_hash, original, converted)
+
+ paths.each do |path|
+ org_array = hash_navigation(path, original)
+ conv_array = hash_navigation(path, converted)
+
+ org_array.is_a?(Array) ? org_arr = Array.wrap(org_array) : org_arr = org_array.clone
+ org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
+ conv_array.is_a?(Array) ? conv_arr = Array.wrap(conv_array) : conv_arr = conv_array.clone
+ conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+
+ for conv_item in conv_array
+ for org_item in org_array
+ if org_item.eql? conv_item
+ org_arr.delete(org_item)
+ conv_arr.delete(conv_item)
+ break
+ end
+ end
+ end
+
+ org_arr.each do |item|
+ path_with_index = path + "[#{org_array.index(item)}]"
+        puts "-: " + path_with_index
+ end
+
+ conv_arr.each do |item|
+ path_with_index = path + "[#{conv_array.index(item)}]"
+ puts "+: " + path_with_index
+ end
+ end
+ end
+
+ def find_difference_bt_hash_arrays(org_arr, conv_arr)
+ # array inputs; the output is an array that contains the items in the original array
+ # that were not found in the converted array
+ org = org_arr.clone
+ conv = conv_arr.clone
+ missing = Array.new
+ if org.eql? conv
+ return missing
+ else
+ for conv_item in conv
+ for org_item in org
+ if org_item.eql? conv_item
+ org.delete(conv_item)
+ break
+ end
+ end
+ end
+ missing += org
+ end
+ missing
+ end
+
+ def find_difference_bt_hashes(org, conv)
+ # input is the original hash and the converted hash; the output is the
+ # 'differences hash' which represents the items in the original hash that were
+ # not found in the converted hash
+ missing = Hash.new
+ if org.eql? conv
+ return missing
+ else
+ org.each do |org_key,org_value|
+ if (conv_value = conv[org_key])
+ if conv_value.eql? org_value
+ next
+ elsif org_value.is_a?(Hash) && conv_value.is_a?(Hash)
+ missing_value = find_difference_bt_hashes(org_value, conv_value)
+ unless missing_value.empty?
+ missing[org_key] = missing_value
+ end
+ elsif org_value.is_a?(Array) && conv_value.is_a?(Array)
+ missing_value = find_difference_bt_hash_arrays(org_value, conv_value)
+ unless missing_value.empty?
+ missing[org_key] = missing_value
+ end
+ else
+ missing[org_key] = org_value
+ end
+ else
+ missing[org_key] = org_value
+ end
+ end
+ end
+ missing
+ end
+
+ def get_dir(value, hash_or_arr)
+ # passing the sought-after value and the hash or array being parsed
+ # output: a single string representing the path to the value arg passed to this method
+ iterable = hash_or_arr.clone
+ dir = String.new
+ if iterable.is_a? Hash
+ if (matching_key = iterable.key(value))
+ dir += '/' + matching_key
+ iterable.delete(matching_key)
+ return dir
+ else
+ iterable.each do |key,val|
+ if val.is_a?(Hash) && hash_to_list_of_values(val).include?(value)
+ dir += '/' + key
+ dir += get_dir(value, val)
+ return dir
+ elsif val.is_a?(Array) && array_to_list_of_values(val).include?(value)
+ dir += '/' + key + "[]"
+ dir += get_dir(value, val)
+ return dir
+ elsif val.eql? value
+ dir += '/' + key
+ iterable.delete(key)
+ return dir
+ end
+ end
+ end
+ elsif iterable.is_a? Array
+ iterable.each do |item|
+ if item.is_a?(Hash) && hash_to_list_of_values(item).include?(value)
+ dir += get_dir(value,item)
+ return dir
+ elsif item.is_a?(Array) && array_to_list_of_values(item).include?(value)
+ dir += get_dir(value,item) + "[]"
+ return dir
+ end
+ end
+ end
+ dir
+ end
+
+ def hash_to_list_of_values(hash)
+ # converts a highly nested hash to a list of all its values
+ list = Array.new
+ for val in hash.values
+ if val.is_a? Hash
+ list += hash_to_list_of_values(val)
+ elsif val.is_a? Array
+ list += array_to_list_of_values(val)
+ else
+ list << val
+ end
+ end
+ list
+ end
+
+ def array_to_list_of_values(array)
+ #converts a highly nested array to a list of all its values
+ ls = Array.new
+ for item in array
+ if item.is_a? Hash
+ ls += hash_to_list_of_values(item)
+ elsif item.is_a? Array
+ ls += array_to_list_of_values(item)
+ else
+ ls << item
+ end
+ end
+ ls
+ end
+
+ def cmr_client
+ @cmr_client ||= Cmr::Client.client_for_environment(Rails.configuration.cmr_env, Rails.configuration.services)
+ end
+end
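A hedged example of driving the task above (the XML path is a placeholder; the argument order [file, format, disp, version] matches the task declaration):

# Shell form, quoted so the brackets survive the shell:
#   bundle exec rake "collection:translate[tmp/collection.echo10.xml,echo10,hide,1.15.3]"
# Programmatic form:
Rake::Task['collection:translate'].invoke('tmp/collection.echo10.xml', 'echo10', 'hide', '1.15.3')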
From 83f50570a518480c636d456433bfce0416750d71 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 15 Jul 2020 13:33:26 -0400
Subject: [PATCH 18/49] MMT-2313: Updated comparison logic to use nokogiri for
array paths
---
.gitignore | 1 +
app/helpers/loss_report_helper.rb | 63 +++++++++---
app/views/collections/loss_report.html.erb | 109 ++++++++++++++++++++-
config/routes.rb | 2 +-
4 files changed, 160 insertions(+), 15 deletions(-)
diff --git a/.gitignore b/.gitignore
index 03dd71f16..e4ef9f124 100644
--- a/.gitignore
+++ b/.gitignore
@@ -60,3 +60,4 @@ nohup.out
#ignore package
package-lock.json
+package.json
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 7992d2da1..80aa95096 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -3,10 +3,15 @@ module LossReportHelper
def prepare_collections(concept_id, format, umm_c_version)
# TODO: need to add exception handling for get_concept, translate_collection
original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
+    # concept ID and format can be pulled from the headers, etc.
original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
translated_collection_native_xml = cmr_client.translate_collection(translated_collection_umm_json.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
+ # File.write('/Users/ctrummer/Documents/devtesting/o_'+concept_id+'.json', JSON.pretty_generate(original_collection_native_hash))
+ # File.write('/Users/ctrummer/Documents/devtesting/c_'+concept_id+'.json', JSON.pretty_generate(translated_collection_native_hash))
+ # File.write('/Users/ctrummer/Documents/devtesting/o_'+concept_id+'.xml', original_collection_native_xml.body)
+ # File.write('/Users/ctrummer/Documents/devtesting/c_'+concept_id+'.xml', translated_collection_native_xml.body)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash
end
@@ -15,13 +20,11 @@ def path_leads_to_list?(path, org_hash, conv_hash)
org_hash_path = hash_navigation(path, org_hash)
conv_hash_path = hash_navigation(path, conv_hash)
- return false if org_hash_path == false || conv_hash_path == false
-
if path.include?("[") && path.include?("]")
bool = true
elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
- org_hash_path.keys.each { |key| bool = true; break if org_hash_path[key].is_a?(Array) }
- conv_hash_path.keys.each { |key| bool = true; break if conv_hash_path[key].is_a?(Array) }
+ bool = true if org_hash_path.keys.length == 1 && org_hash_path[org_hash_path.keys[0]].is_a?(Array)
+ bool = true if conv_hash_path.keys.length == 1 && conv_hash_path[conv_hash_path.keys[0]].is_a?(Array)
elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
bool = true
else
@@ -33,13 +36,13 @@ def path_leads_to_list?(path, org_hash, conv_hash)
def hash_navigation(dir, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the hash at the end of the path
- dir = dir.split("/")
+ dir = dir.split '/'
if dir.is_a? Array
dir.each do |key|
if !key.empty? && hash.is_a?(Hash)
hash = hash[key]
elsif hash.is_a? Array
- return false
+ return hash
end
end
else
@@ -80,6 +83,44 @@ def get_list_paths(dif_hash, original, converted)
paths
end
+ def array_comparison(path, original_hash, converted_hash)
+ # this is a 'less iterative' version of compare_arrays. Args: a single path, the original hash, and the converted hash.
+ # Rather than finding all the array paths and using those to find the array differences, the array paths are individually
+ # supplied by the nokogiri gem; this reduces redundancy
+ org_array = hash_navigation(path, original_hash)
+ conv_array = hash_navigation(path, converted_hash)
+
+ org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
+ org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
+ conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
+ conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+
+ for conv_item in conv_array
+ for org_item in org_array
+ if org_item.eql? conv_item
+ org_arr.delete(org_item)
+ conv_arr.delete(conv_item)
+ break
+ end
+ end
+ end
+
+ output = Array.new
+ org_arr.each do |item|
+ path_with_index = path + "[#{org_array.index(item)}]"
+ loss_item = ['-', item, path_with_index]
+ output << loss_item
+ end
+
+
+ conv_arr.each do |item|
+ path_with_index = path + "[#{conv_array.index(item)}]"
+ loss_item = ['+', item, path_with_index]
+ output << loss_item
+ end
+ output
+ end
+
def compare_arrays(original_hash, converted_hash, dh=false)
# arguments: differences hash, the original hash, and converted hash
# each path that leads to an array is used to navigate to that array and
@@ -94,12 +135,12 @@ def compare_arrays(original_hash, converted_hash, dh=false)
output = Array.new
paths.each do |path|
- org_array = hash_navigation(path, original)
- conv_array = hash_navigation(path, converted)
+ org_array = hash_navigation(path, original_hash)
+ conv_array = hash_navigation(path, converted_hash)
- org_array.is_a?(Array) ? org_arr = Array.wrap(org_array) : org_arr = org_array.clone
+ org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_array.is_a?(Array) ? conv_arr = Array.wrap(conv_array) : conv_arr = conv_array.clone
+ conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
for conv_item in conv_array
@@ -121,7 +162,7 @@ def compare_arrays(original_hash, converted_hash, dh=false)
conv_arr.each do |item|
path_with_index = path + "[#{conv_array.index(item)}]"
- puts "+: ".ljust(60) + path_with_index #THIS INDEX DOESN'T MAKE SENSE
+ puts "+: ".ljust(60) + path_with_index
loss_item = ['+', path_with_index]
output << loss_item
end
diff --git a/app/views/collections/loss_report.html.erb b/app/views/collections/loss_report.html.erb
index 702b54024..7e19bdfdf 100644
--- a/app/views/collections/loss_report.html.erb
+++ b/app/views/collections/loss_report.html.erb
@@ -1,34 +1,137 @@
-
+
  <thead>
    <tr>
      <th><%= 'Alteration' %></th>
-      <th><%= 'Path' %></th>
+      <th><%= 'Node' %></th>
+      <th><%= 'Path' %></th>
    </tr>
  </thead>
  <tbody>
-  <% orig,conv,orig_h,conv_h = prepare_collections('C1200000085-NSIDC_ECS', 'echo10', '1.15.3') %>
+  <% orig,conv,orig_h,conv_h = prepare_collections('C1200000063-LARC', 'echo10', '1.15.3') %>
  <% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
  <% conv = Nokogiri::XML(conv) { |config| config.strict.noblanks } %>
+  <% ignored_paths = Array.new %>
+
+    <tr>
+      <td>
+        <%= 'NOKOGIRI + FAST COMPARISON ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+    </tr>
+
+  <% counter = 0 %>
+  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
+    <% if node.parent.path.include?('[') && !ignored_paths.include?(node.parent.path.split('[')[0]) %>
+      <% ignored_paths << node.parent.path.split('[')[0] %>
+      <% array_comparison(node.parent.path.split('[')[0], orig_h, conv_h).each do |item| %>
+        <tr>
+          <td>
+            <%= item[0] %>
+          </td>
+          <td>
+            <%= counter %>
+            <%= item[1] %>
+          </td>
+          <td>
+            <%= item[2] %>
+          </td>
+        </tr>
+        <% counter += 1%>
+      <% end %>
+    <% elsif !ignored_paths.include?(node.parent.path.split('[')[0]) && !path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
+      <tr>
+        <td>
+          <%= change %>
+        </td>
+        <td>
+          <%= counter %>
+          <%= node.to_html %>
+        </td>
+        <td>
+          <%= node.parent.path %>
+        </td>
+      </tr>
+      <% counter += 1%>
+    <% end %>
+  <% end %>
+
+    <tr>
+      <td>
+        <%= 'NOKOGIRI + EXPENSIVE COMPARISON ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+    </tr>
+
+  <% counter = 0 %>
  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
    <% next if path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
    <tr>
      <td>
        <%= change %>
      </td>
+      <td>
+        <%= counter %>
+        <%= node %>
+      </td>
      <td>
        <%= node.parent.path %>
      </td>
    </tr>
+    <% counter += 1%>
  <% end %>
  <% compare_arrays(orig_h, conv_h).each do |item| %>
    <tr>
      <td>
        <%= item[0] %>
      </td>
+      <td>
+        <%= counter %>
+      </td>
      <td>
        <%= item[1] %>
      </td>
    </tr>
+    <% counter += 1%>
+  <% end %>
+
+    <tr>
+      <td>
+        <%= 'ONLY NOKOGIRI ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+      <td>
+        <%= ' ' %>
+      </td>
+    </tr>
+
+  <% counter = 0 %>
+  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
+    <% next if path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
+    <tr>
+      <td>
+        <%= change %>
+      </td>
+      <td>
+        <%= counter %>
+        <%= node.to_html %>
+      </td>
+      <td>
+        <%= node.parent.path %>
+      </td>
+    </tr>
+    <% counter += 1%>
+  <% end %>
  </tbody>
</table>
diff --git a/config/routes.rb b/config/routes.rb
index 23cd27dff..74e5b20ac 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -69,7 +69,7 @@
get '/collections/:id/download_xml/:format(/:revision_id)' => 'collections#download_xml', as: 'download_collection_xml'
get '/collections/:id/create_delete_proposal' => 'collections#create_delete_proposal', as: 'create_delete_proposal_collection'
get '/collections/:id/create_update_proposal' => 'collections#create_update_proposal', as: 'create_update_proposal_collection'
- get 'collections/:id/loss' => 'collections#loss_report'
+ get '/collections/:id/loss' => 'collections#loss_report'
resource :variable_generation_processes_search, only: [:new]
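array_comparison, added above, differs from compare_arrays in returning triples that carry the lost item itself. A sketch with invented data (helper assumed mixed in):

original  = { 'Emails' => { 'Email' => ['a@x.gov', 'b@x.gov'] } }
converted = { 'Emails' => { 'Email' => ['a@x.gov'] } }

array_comparison('/Emails/Email', original, converted).each do |change, item, path|
  puts "#{change} #{item} #{path}" # prints "- b@x.gov /Emails/Email[1]"
end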
From affd9ab38443497a41354bec1f5bd25c981a99af Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 15 Jul 2020 14:11:33 -0400
Subject: [PATCH 19/49] MMT-2313: removed some comments and test code
---
app/controllers/collections_controller.rb | 3 +
app/views/collections/loss_report.html.erb | 87 ----------------------
2 files changed, 3 insertions(+), 87 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index da1c79d88..63a4210cf 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -119,6 +119,9 @@ def loss_report
# When a user wants to use MMT to edit metadata that currently exists in a non-UMM form,
# it's important that they're able to see if any data loss occurs in the translation to UMM.
# This method is needed to reference the appropriate helper and view for the lossiness report
+ concept_id = params[:id]
+ collection_response = cmr_client.get_concept(concept_id, token, {})
+    render plain: collection_response.body
end
private
diff --git a/app/views/collections/loss_report.html.erb b/app/views/collections/loss_report.html.erb
index 7e19bdfdf..8716ce780 100644
--- a/app/views/collections/loss_report.html.erb
+++ b/app/views/collections/loss_report.html.erb
@@ -11,19 +11,6 @@
<% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
<% conv = Nokogiri::XML(conv) { |config| config.strict.noblanks } %>
<% ignored_paths = Array.new %>
-
-    <tr>
-      <td>
-        <%= 'NOKOGIRI + FAST COMPARISON ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-    </tr>
-
<% counter = 0 %>
<% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
<% if node.parent.path.include?('[') && !ignored_paths.include?(node.parent.path.split('[')[0]) %>
@@ -59,79 +46,5 @@
<% counter += 1%>
<% end %>
<% end %>
-
-    <tr>
-      <td>
-        <%= 'NOKOGIRI + EXPENSIVE COMPARISON ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-    </tr>
-
-  <% counter = 0 %>
-  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
-    <% next if path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
-    <tr>
-      <td>
-        <%= change %>
-      </td>
-      <td>
-        <%= counter %>
-        <%= node %>
-      </td>
-      <td>
-        <%= node.parent.path %>
-      </td>
-    </tr>
-    <% counter += 1%>
-  <% end %>
-  <% compare_arrays(orig_h, conv_h).each do |item| %>
-    <tr>
-      <td>
-        <%= item[0] %>
-      </td>
-      <td>
-        <%= counter %>
-      </td>
-      <td>
-        <%= item[1] %>
-      </td>
-    </tr>
-    <% counter += 1%>
-  <% end %>
-
-    <tr>
-      <td>
-        <%= 'ONLY NOKOGIRI ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-      <td>
-        <%= ' ' %>
-      </td>
-    </tr>
-
-  <% counter = 0 %>
-  <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
-    <% next if path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
-    <tr>
-      <td>
-        <%= change %>
-      </td>
-      <td>
-        <%= counter %>
-        <%= node.to_html %>
-      </td>
-      <td>
-        <%= node.parent.path %>
-      </td>
-    </tr>
-    <% counter += 1%>
-  <% end %>
  </tbody>
</table>
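The branch kept in the view de-duplicates array reporting: nokogiri-diff emits one change per changed array element, so the first hit on an indexed path triggers a single whole-array comparison and later hits on the same base path are skipped. The idea in isolation:

# Sketch of the ignored_paths de-duplication used in the surviving view code.
ignored_paths = []
changed_paths = ['/Collection/Contacts/Contact[2]',
                 '/Collection/Contacts/Contact[3]',
                 '/Collection/ShortName']

changed_paths.each do |path|
  base = path.split('[').first
  next if ignored_paths.include?(base)

  if path.include?('[')
    ignored_paths << base
    puts "compare whole array at #{base}"
  else
    puts "single change at #{path}"
  end
end
# prints: compare whole array at /Collection/Contacts/Contact
# prints: single change at /Collection/ShortName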
From 7cae6d6a78948dcdeb0cfe8341dca9a631f86724 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Thu, 16 Jul 2020 15:10:01 -0400
Subject: [PATCH 20/49] MMT-2313: Added custom function for hash mapping, finds
the path for every value in a hash
---
app/controllers/collections_controller.rb | 7 +-
app/helpers/loss_report_helper.rb | 308 ++++++---------------
app/views/collections/loss_report.html.erb | 1 +
lib/tasks/compare_xml_collections.rake | 124 +++++++++
lib/tasks/translate_collections.rake | 281 -------------------
5 files changed, 219 insertions(+), 502 deletions(-)
create mode 100644 lib/tasks/compare_xml_collections.rake
delete mode 100644 lib/tasks/translate_collections.rake
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 63a4210cf..5cc0fb3fd 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -2,6 +2,7 @@ class CollectionsController < ManageCollectionsController
include ManageMetadataHelper
include CMRCollectionsHelper
include CollectionsHelper
+ include LossReportHelper
before_action :set_collection
before_action :ensure_correct_collection_provider, only: [:edit, :clone, :revert, :destroy]
@@ -120,8 +121,10 @@ def loss_report
# it's important that they're able to see if any data loss occurs in the translation to UMM.
# This method is needed to reference the appropriate helper and view for the lossiness report
concept_id = params[:id]
- collection_response = cmr_client.get_concept(concept_id, token, {})
-    render plain: collection_response.body
+ respond_to do |format|
+ format.any {render plain: loss_report_output(concept_id, hide_items=false, disp='text') }
+ format.json { render json: JSON.pretty_generate(loss_report_output(concept_id, hide_items=false, disp='json')) }
+ end
end
private
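With the respond_to block above, the report's format follows the request. A hedged usage sketch from a Rails console (placeholder concept id; `app` is the console's integration session):

app.get('/collections/C1200000063-LARC/loss')      # plain-text report
app.get('/collections/C1200000063-LARC/loss.json') # pretty-printed JSON
puts app.response.body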
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 80aa95096..fd946c95d 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,5 +1,80 @@
module LossReportHelper
+ def loss_report_output(concept_id, hide_items=true, disp='text')
+ orig,conv,orig_h,conv_h = prepare_collections(concept_id, 'echo10', '1.15.3')
+ orig = Nokogiri::XML(orig) { |config| config.strict.noblanks }
+ conv = Nokogiri::XML(conv) { |config| config.strict.noblanks }
+
+ ignored_paths = Array.new
+ comparison_string = String.new if disp == 'text'
+ comparison_hash = Hash.new if disp == 'json'
+
+ counter = 1
+ orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+ split_path = node.parent.path.split('[')
+ if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0])
+ ignored_paths << split_path[0]
+ array_comparison(split_path[0], orig_h, conv_h).each do |item|
+ if disp == 'text'
+ comparison_string += "#{counter}. #{item[0]}: #{item[1]}".ljust(60) + item[2] + "\n" if hide_items == false
+ comparison_string += "#{counter}. #{item[0]}: ".ljust(2) + item[2] + "\n" if hide_items == true
+ elsif disp == 'json'
+ comparison_hash["#{counter}. #{item[0]}: #{item[2]}"] = item[1]
+ end
+ counter += 1
+ end
+ elsif !ignored_paths.include?(split_path[0]) && !path_leads_to_list?(node.parent.path, orig_h, conv_h)
+ element = node.to_xml
+ path = node.parent.path
+ if disp == 'text'
+        if element.include?('<') && element.include?('>')
+ element = Hash.from_xml(element)
+ hash_map(element).each do |item|
+ comparison_string += "#{counter}. #{change}: #{item['value']}".ljust(60) + path + '/' + item['path'] + "\n" if hide_items == false
+ comparison_string += "#{counter}. #{change}: ".ljust(2) + path + '/' + item['path'] + "\n" if hide_items == true
+ counter += 1
+ end
+ else
+ comparison_string += "#{counter}. #{change}: #{element}".ljust(60) + path + "\n" if hide_items == false
+ comparison_string += "#{counter}. #{change}: ".ljust(2) + path + "\n" if hide_items == true
+ counter += 1
+ end
+ elsif disp == 'json'
+ if element.include?('<' && '' && '>')
+ element = Hash.from_xml(element)
+ hash_map(element).each do |item|
+ comparison_hash["#{counter}. #{change}: #{path + '/' + item['path']}"] = item['value']
+ counter += 1
+ end
+ else
+ comparison_hash["#{counter}. #{change}: #{path}"] = element
+ counter += 1
+ end
+ end
+ end
+ end
+ if disp == 'text'
+ return comparison_string
+ elsif disp == 'json'
+ return comparison_hash
+ end
+ end
+
+ def hash_map(hash)
+ buckets = Array.new
+ hash.each do |key,val|
+ if val.is_a? Hash
+ hash_map(val).each do |item|
+ item['path'] = key + '/' + item['path']
+ buckets << item
+ end
+ else
+ buckets << {'path'=> key, 'value'=> val}
+ end
+ end
+ buckets
+ end
+
def prepare_collections(concept_id, format, umm_c_version)
# TODO: need to add exception handling for get_concept, translate_collection
original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
@@ -23,6 +98,10 @@ def path_leads_to_list?(path, org_hash, conv_hash)
if path.include?("[") && path.include?("]")
bool = true
elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
+ # the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
+ # <Contacts><Contact>...</Contact><Contact>...</Contact></Contacts>, and so all array-containing tags will be the plural
+ # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
+ # displayed by nokogiri
bool = true if org_hash_path.keys.length == 1 && org_hash_path[org_hash_path.keys[0]].is_a?(Array)
bool = true if conv_hash_path.keys.length == 1 && conv_hash_path[conv_hash_path.keys[0]].is_a?(Array)
elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
@@ -37,56 +116,18 @@ def hash_navigation(dir, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the hash at the end of the path
dir = dir.split '/'
- if dir.is_a? Array
- dir.each do |key|
- if !key.empty? && hash.is_a?(Hash)
- hash = hash[key]
- elsif hash.is_a? Array
- return hash
- end
+ dir.each do |key|
+ if !key.empty? && hash.is_a?(Hash)
+ hash = hash[key]
+ elsif hash.is_a? Array
+ return hash
end
- else
- hash = hash[dir]
end
hash
end
- def get_list_paths(dif_hash, original, converted)
- # arguments: differences hash, the original hash, and converted hash
- # Using these 3 hashes, all paths that lead to a list are returned as an array of path strings
- values_list = hash_to_list_of_values(dif_hash)
- paths = Array.new
-
- for item in values_list
- org_path = get_dir(item, original)
- conv_path = get_dir(item, converted)
-
- if org_path.include? "[]"
- path = org_path
- elsif conv_path.include? "[]"
- path = conv_path
- else
- path = org_path
- end
-
- # the get_dir method includes a clause that 'tags' array-containing fields with '[]'
- # eg. '/Collection/Contacts/Contact[]/OrganizationEmails/Email'
- # the following lines show how this 'tagging' is used to identify an array in a given directory
-
- if path.include? "[]"
- path = path.split "[]"
- paths << path[0] unless paths.any? { |p| p.eql? path[0] }
- elsif path_leads_to_list?(path, original, converted)
- paths << path unless paths.any? { |p| p.eql? path }
- end
- end
- paths
- end
-
def array_comparison(path, original_hash, converted_hash)
- # this is a 'less iterative' version of compare_arrays. Args: a single path, the original hash, and the converted hash.
- # Rather than finding all the array paths and using those to find the array differences, the array paths are individually
- # supplied by the nokogiri gem; this reduces redundancy
+
org_array = hash_navigation(path, original_hash)
conv_array = hash_navigation(path, converted_hash)
@@ -95,6 +136,10 @@ def array_comparison(path, original_hash, converted_hash)
conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+ # org_arr and conv_arr are copies of org_array and conv_array, respectively.
+ # The *_arr values are edited during the comparison between the org_array and conv_array arrays
+ # and so the *_array arrays are used to maintain a full version of each array for indexing the items in the following lines.
+
for conv_item in conv_array
for org_item in org_array
if org_item.eql? conv_item
@@ -120,179 +165,4 @@ def array_comparison(path, original_hash, converted_hash)
end
output
end
-
- def compare_arrays(original_hash, converted_hash, dh=false)
- # arguments: differences hash, the original hash, and converted hash
- # each path that leads to an array is used to navigate to that array and
- # subsequently compare the arrays in the original and converted hashes.
- # there is no usable output; results are only printed to the terminal
-
- dh ? dif_hash = dh.clone : dif_hash = find_difference_bt_hashes(original_hash, converted_hash).clone
- original = original_hash.clone
- converted = converted_hash.clone
- paths = get_list_paths(dif_hash, original, converted)
-
- output = Array.new
-
- paths.each do |path|
- org_array = hash_navigation(path, original_hash)
- conv_array = hash_navigation(path, converted_hash)
-
- org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
- org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
- conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
-
- for conv_item in conv_array
- for org_item in org_array
- if org_item.eql? conv_item
- org_arr.delete(org_item)
- conv_arr.delete(conv_item)
- break
- end
- end
- end
-
- org_arr.each do |item|
- path_with_index = path + "[#{org_array.index(item)}]"
- puts "-: ".ljust(60) + path_with_index
- loss_item = ['-', path_with_index]
- output << loss_item
- end
-
- conv_arr.each do |item|
- path_with_index = path + "[#{conv_array.index(item)}]"
- puts "+: ".ljust(60) + path_with_index
- loss_item = ['+', path_with_index]
- output << loss_item
- end
- end
- output
- end
-
- def find_difference_bt_hash_arrays(org_arr, conv_arr)
- # array inputs; the output is an array that contains the items in the original array
- # that were not found in the converted array
- org = org_arr.clone
- conv = conv_arr.clone
- missing = Array.new
- if org.eql? conv
- return missing
- else
- for conv_item in conv
- for org_item in org
- if org_item.eql? conv_item
- org.delete(conv_item)
- break
- end
- end
- end
- missing += org
- end
- missing
- end
-
- def find_difference_bt_hashes(org, conv)
- # input is the original hash and the converted hash; the output is the
- # 'differences hash' which represents the items in the original hash that were
- # not found in the converted hash
- missing = Hash.new
- if org.eql? conv
- return missing
- else
- org.each do |org_key,org_value|
- if (conv_value = conv[org_key])
- if conv_value.eql? org_value
- next
- elsif org_value.is_a?(Hash) && conv_value.is_a?(Hash)
- missing_value = find_difference_bt_hashes(org_value, conv_value)
- unless missing_value.empty?
- missing[org_key] = missing_value
- end
- elsif org_value.is_a?(Array) && conv_value.is_a?(Array)
- missing_value = find_difference_bt_hash_arrays(org_value, conv_value)
- unless missing_value.empty?
- missing[org_key] = missing_value
- end
- else
- missing[org_key] = org_value
- end
- else
- missing[org_key] = org_value
- end
- end
- end
- missing
- end
-
- def get_dir(value, hash_or_arr)
- # passing the sought-after value and the hash or array being parsed
- # output: a single string representing the path to the value arg passed to this method
- iterable = hash_or_arr.clone
- dir = String.new
- if iterable.is_a? Hash
- if (matching_key = iterable.key(value))
- dir += '/' + matching_key
- iterable.delete(matching_key)
- return dir
- else
- iterable.each do |key,val|
- if val.is_a?(Hash) && hash_to_list_of_values(val).include?(value)
- dir += '/' + key
- dir += get_dir(value, val)
- return dir
- elsif val.is_a?(Array) && array_to_list_of_values(val).include?(value)
- dir += '/' + key + "[]"
- dir += get_dir(value, val)
- return dir
- elsif val.eql? value
- dir += '/' + key
- iterable.delete(key)
- return dir
- end
- end
- end
- elsif iterable.is_a? Array
- iterable.each do |item|
- if item.is_a?(Hash) && hash_to_list_of_values(item).include?(value)
- dir += get_dir(value,item)
- return dir
- elsif item.is_a?(Array) && array_to_list_of_values(item).include?(value)
- dir += get_dir(value,item) + "[]"
- return dir
- end
- end
- end
- dir
- end
-
- def hash_to_list_of_values(hash)
- # converts a highly nested hash to a list of all its values
- list = Array.new
- for val in hash.values
- if val.is_a? Hash
- list += hash_to_list_of_values(val)
- elsif val.is_a? Array
- list += array_to_list_of_values(val)
- else
- list << val
- end
- end
- list
- end
-
- def array_to_list_of_values(array)
- #converts a highly nested array to a list of all its values
- ls = Array.new
- for item in array
- if item.is_a? Hash
- ls += hash_to_list_of_values(item)
- elsif item.is_a? Array
- ls += array_to_list_of_values(item)
- else
- ls << item
- end
- end
- ls
- end
end
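For reference, a short usage sketch of the hash_map helper added above: it flattens a nested hash (such as the output of Hash.from_xml) into one {'path' => ..., 'value' => ...} entry per leaf value. The sample XML is illustrative:

    element = Hash.from_xml('<Contact><Role>ARCHIVER</Role>' \
                            '<OrganizationEmails><Email>x@y.z</Email></OrganizationEmails></Contact>')

    hash_map(element)
    # => [{'path' => 'Contact/Role', 'value' => 'ARCHIVER'},
    #     {'path' => 'Contact/OrganizationEmails/Email', 'value' => 'x@y.z'}]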
diff --git a/app/views/collections/loss_report.html.erb b/app/views/collections/loss_report.html.erb
index 8716ce780..f27bbf9f0 100644
--- a/app/views/collections/loss_report.html.erb
+++ b/app/views/collections/loss_report.html.erb
@@ -6,6 +6,7 @@
<%= 'Path' %> |
+
<% orig,conv,orig_h,conv_h = prepare_collections('C1200000063-LARC', 'echo10', '1.15.3') %>
<% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
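As the view lines above show, prepare_collections returns four values: the original native XML, the round-tripped native XML, and Hash.from_xml conversions of each. A hedged sketch of the destructuring (the concept id is copied from the view; variable names are illustrative):

    orig_xml, conv_xml, orig_hash, conv_hash = prepare_collections('C1200000063-LARC', 'echo10', '1.15.3')
    orig_doc = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
    conv_doc = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }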
diff --git a/lib/tasks/compare_xml_collections.rake b/lib/tasks/compare_xml_collections.rake
new file mode 100644
index 000000000..27ca858b3
--- /dev/null
+++ b/lib/tasks/compare_xml_collections.rake
@@ -0,0 +1,124 @@
+require 'libxml_to_hash'
+
+namespace :collection do
+ desc 'Translate a collection from native format to UMM JSON and back to native format'
+ task :loss, [:file, :format, :disp, :version] => :environment do |_task, args|
+ args.with_defaults(:version => '1.15.3')
+ args.with_defaults(:disp => 'show')
+
+ abort 'FORMAT INVALID' unless %w[echo10 dif10 iso19115].include?(args.format)
+
+ filename = args.file.split('/')[-1]
+ puts "\nTranslating #{filename} to UMM JSON..."
+
+ native_original_xml = File.read(args.file)
+ native_original_hash = Hash.from_xml(native_original_xml)
+
+ #translate to UMM
+ umm_response = cmr_client.translate_collection(native_original_xml, "application/#{args.format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{args.version}", skip_validation=true )
+ umm_json = umm_response.body.to_json
+ umm_response.success? ? puts("\nsuccessful translation to UMM") : abort("\nUMM translation failure")
+
+ # translate back to native
+ back_to_native = cmr_client.translate_collection(umm_json, "application/vnd.nasa.cmr.umm+json;version=#{args.version}", "application/#{args.format}+xml", skip_validation=true )
+ native_converted_hash = Hash.from_xml(back_to_native.body)
+ native_converted_xml = back_to_native.body
+ back_to_native.success? ? puts("successful translation to native format \n\n") : abort("Native format translation failure \n\n")
+
+ # nokogiri output
+ nokogiri_original = Nokogiri::XML(native_original_xml) { |config| config.strict.noblanks }
+ nokogiri_converted = Nokogiri::XML(native_converted_xml) { |config| config.strict.noblanks }
+
+ ignored_paths = Array.new
+
+ nokogiri_original.diff(nokogiri_converted, {:added => true, :removed => true}) do |change,node|
+ split_path = node.parent.path.split('[')
+ if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0])
+ ignored_paths << split_path[0]
+ array_comparison(split_path[0], native_original_hash, native_converted_hash).each do |item|
+ puts("#{item[0]}: #{item[1]}".ljust(60) + item[2]) if args.disp.eql? 'show'
+ puts("#{item[0]}: ". + item[2]) if args.disp.eql? 'hide'
+ end
+ elsif !ignored_paths.include?(split_path[0]) && !path_leads_to_list?(node.parent.path, native_original_hash, native_converted_hash)
+ puts("#{change}: #{node.to_xml}".ljust(60) + node.parent.path) if args.disp.eql? 'show'
+ puts("#{change}: ". + node.parent.path) if args.disp.eql? 'hide'
+ end
+ end
+ end
+
+ def path_leads_to_list?(path, org_hash, conv_hash)
+ # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
+ org_hash_path = hash_navigation(path, org_hash)
+ conv_hash_path = hash_navigation(path, conv_hash)
+
+ if path.include?("[") && path.include?("]")
+ bool = true
+ elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
+ # the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
+ # <Contacts><Contact>...</Contact><Contact>...</Contact></Contacts>, and so all array-containing tags will be the plural
+ # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
+ # displayed by nokogiri
+ bool = true if org_hash_path.keys.length == 1 && org_hash_path[org_hash_path.keys[0]].is_a?(Array)
+ bool = true if conv_hash_path.keys.length == 1 && conv_hash_path[conv_hash_path.keys[0]].is_a?(Array)
+ elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
+ bool = true
+ else
+ bool = false
+ end
+ bool
+ end
+
+ def hash_navigation(dir, hash)
+ # Passed a path string and the hash being navigated. This method parses the path string and
+ # returns the hash at the end of the path
+ dir = dir.split '/'
+ dir.each do |key|
+ if !key.empty? && hash.is_a?(Hash)
+ hash = hash[key]
+ elsif hash.is_a? Array
+ return hash
+ end
+ end
+ hash
+ end
+
+ def array_comparison(path, original_hash, converted_hash)
+
+ org_array = hash_navigation(path, original_hash)
+ conv_array = hash_navigation(path, converted_hash)
+
+ org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
+ org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
+ conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
+ conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+
+ # org_arr and conv_arr are copies of org_array and conv_array, respectively.
+ # The *_arr values are edited during the comparison between the org_array and conv_array arrays
+ # and so the *_array arrays are used to maintain a full version of each array for indexing the items in the following lines.
+
+ for conv_item in conv_array
+ for org_item in org_array
+ if org_item.eql? conv_item
+ org_arr.delete(org_item)
+ conv_arr.delete(conv_item)
+ break
+ end
+ end
+ end
+
+ output = Array.new
+ org_arr.each do |item|
+ path_with_index = path + "[#{org_array.index(item)}]"
+ loss_item = ['-', item, path_with_index]
+ output << loss_item
+ end
+
+
+ conv_arr.each do |item|
+ path_with_index = path + "[#{conv_array.index(item)}]"
+ loss_item = ['+', item, path_with_index]
+ output << loss_item
+ end
+ output
+ end
+end
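Assuming standard Rake argument handling, the task takes a file path and format plus optional disp/version arguments that fall back to the defaults above. The file path below is illustrative:

    # Command line: bundle exec rake "collection:loss[tmp/example.echo10,echo10,show]"
    # Programmatic equivalent, e.g. from a Rails console:
    Rake::Task['collection:loss'].invoke('tmp/example.echo10', 'echo10', 'show')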
diff --git a/lib/tasks/translate_collections.rake b/lib/tasks/translate_collections.rake
deleted file mode 100644
index ec0ae90e8..000000000
--- a/lib/tasks/translate_collections.rake
+++ /dev/null
@@ -1,281 +0,0 @@
-require 'libxml_to_hash'
-
-namespace :collection do
- desc 'Translate a collection from native format to UMM JSON and back to native format'
- task :translate, [:file, :format, :disp, :version] => :environment do |_task, args|
- args.with_defaults(:version => '1.15.3')
- args.with_defaults(:disp => 'show')
-
- abort 'FORMAT INVALID' unless args.format.eql? ('echo10' || 'dif10' || 'iso19115')
-
- filename = args.file.split('/')[-1]
- puts "\nTranslating #{filename} to UMM JSON..."
-
- native_original_xml = File.read(args.file)
- native_original_hash = Hash.from_xml(native_original_xml)
-
- #translate to UMM
- umm_response = cmr_client.translate_collection(native_original_xml, "application/#{args.format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{args.version}", skip_validation=true )
- umm_json = umm_response.body.to_json
- umm_response.success? ? puts("\nsuccessful translation to UMM") : abort("\nUMM translation failure")
-
- # translate back to native
- back_to_native = cmr_client.translate_collection(umm_json, "application/vnd.nasa.cmr.umm+json;version=#{args.version}", "application/#{args.format}+xml", skip_validation=true )
- native_converted_hash = Hash.from_xml(back_to_native.body)
- native_converted_xml = back_to_native.body
- back_to_native.success? ? puts("successful translation to native format \n\n") : abort("Native format translation failure \n\n")
-
- # nokogiri output
- nokogiri_original = Nokogiri::XML(native_original_xml) { |config| config.strict.noblanks }
- nokogiri_converted = Nokogiri::XML(native_converted_xml) { |config| config.strict.noblanks }
-
- nokogiri_original.diff(nokogiri_converted, {:added => true, :removed => true}) do |change,node|
- next if path_leads_to_list?(node.parent.path, native_original_hash, native_converted_hash)
- puts("#{change}: #{node.to_xml}".ljust(60) + node.parent.path) if args.disp.eql? 'show'
- puts("#{change}: ". + node.parent.path) if args.disp.eql? 'hide'
- end
-
- # find differences
- dif_hash = find_difference_bt_hashes(native_original_hash, native_converted_hash)
- compare_arrays(dif_hash, native_original_hash, native_converted_hash)
-
- end
-
- def path_leads_to_list?(path, org_hash, conv_hash)
- # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
- org_hash_path = hash_navigation(path, org_hash)
- conv_hash_path = hash_navigation(path, conv_hash)
-
- return false if org_hash_path == false || conv_hash_path == false
-
- if path.include?("[") && path.include?("]")
- bool = true
- elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
- org_hash_path.keys.each { |key| bool = true; break if org_hash_path[key].is_a?(Array) }
- conv_hash_path.keys.each { |key| bool = true; break if conv_hash_path[key].is_a?(Array) }
- elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
- bool = true
- else
- bool = false
- end
- bool
- end
-
- def hash_navigation(dir, hash)
- # Passed a path string and the hash being navigated. This method parses the path string and
- # returns the hash at the end of the path
- dir = dir.split("/")
- if dir.is_a? Array
- dir.each do |key|
- if !key.empty? && hash.is_a?(Hash)
- hash = hash[key]
- elsif hash.is_a? Array
- return false
- end
- end
- else
- hash = hash[dir]
- end
- hash
- end
-
- def get_list_paths(dif_hash, original, converted)
- # arguments: differences hash, the original hash, and converted hash
- # Using these 3 hashes, all paths that lead to a list are returned as an array of path strings
- values_list = hash_to_list_of_values(dif_hash)
- paths = Array.new
-
- for item in values_list
- org_path = get_dir(item, original)
- conv_path = get_dir(item, converted)
-
- if org_path.include? "[]"
- path = org_path
- elsif conv_path.include? "[]"
- path = conv_path
- else
- path = org_path
- end
-
- # the get_dir method includes a clause that 'tags' array-containing fields with '[]'
- # eg. '/Collection/Contacts/Contact[]/OrganizationEmails/Email'
- # the following lines show how this 'tagging' is used to identify an array in a given directory
-
- if path.include? "[]"
- path = path.split "[]"
- paths << path[0] unless paths.any? { |p| p.eql? path[0] }
- elsif path_leads_to_list?(path, original, converted)
- paths << path unless paths.any? { |p| p.eql? path }
- end
- end
- paths
- end
-
- def compare_arrays(dif_hash, original, converted)
- # arguments: differences hash, the original hash, and converted hash
- # each path that leads to an array is used to navigate to that array and
- # subsequently compare the arrays in the original and converted hashes.
- # there is no usable output; results are only printed to the terminal
- paths = get_list_paths(dif_hash, original, converted)
-
- paths.each do |path|
- org_array = hash_navigation(path, original)
- conv_array = hash_navigation(path, converted)
-
- org_array.is_a?(Array) ? org_arr = Array.wrap(org_array) : org_arr = org_array.clone
- org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_array.is_a?(Array) ? conv_arr = Array.wrap(conv_array) : conv_arr = conv_array.clone
- conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
-
- for conv_item in conv_array
- for org_item in org_array
- if org_item.eql? conv_item
- org_arr.delete(org_item)
- conv_arr.delete(conv_item)
- break
- end
- end
- end
-
- org_arr.each do |item|
- path_with_index = path + "[#{org_array.index(item)}]"
- puts "-: ". + path_with_index
- end
-
- conv_arr.each do |item|
- path_with_index = path + "[#{conv_array.index(item)}]"
- puts "+: " + path_with_index
- end
- end
- end
-
- def find_difference_bt_hash_arrays(org_arr, conv_arr)
- # array inputs; the output is an array that contains the items in the original array
- # that were not found in the converted array
- org = org_arr.clone
- conv = conv_arr.clone
- missing = Array.new
- if org.eql? conv
- return missing
- else
- for conv_item in conv
- for org_item in org
- if org_item.eql? conv_item
- org.delete(conv_item)
- break
- end
- end
- end
- missing += org
- end
- missing
- end
-
- def find_difference_bt_hashes(org, conv)
- # input is the original hash and the converted hash; the output is the
- # 'differences hash' which represents the items in the original hash that were
- # not found in the converted hash
- missing = Hash.new
- if org.eql? conv
- return missing
- else
- org.each do |org_key,org_value|
- if (conv_value = conv[org_key])
- if conv_value.eql? org_value
- next
- elsif org_value.is_a?(Hash) && conv_value.is_a?(Hash)
- missing_value = find_difference_bt_hashes(org_value, conv_value)
- unless missing_value.empty?
- missing[org_key] = missing_value
- end
- elsif org_value.is_a?(Array) && conv_value.is_a?(Array)
- missing_value = find_difference_bt_hash_arrays(org_value, conv_value)
- unless missing_value.empty?
- missing[org_key] = missing_value
- end
- else
- missing[org_key] = org_value
- end
- else
- missing[org_key] = org_value
- end
- end
- end
- missing
- end
-
- def get_dir(value, hash_or_arr)
- # passing the sought-after value and the hash or array being parsed
- # output: a single string representing the path to the value arg passed to this method
- iterable = hash_or_arr.clone
- dir = String.new
- if iterable.is_a? Hash
- if (matching_key = iterable.key(value))
- dir += '/' + matching_key
- iterable.delete(matching_key)
- return dir
- else
- iterable.each do |key,val|
- if val.is_a?(Hash) && hash_to_list_of_values(val).include?(value)
- dir += '/' + key
- dir += get_dir(value, val)
- return dir
- elsif val.is_a?(Array) && array_to_list_of_values(val).include?(value)
- dir += '/' + key + "[]"
- dir += get_dir(value, val)
- return dir
- elsif val.eql? value
- dir += '/' + key
- iterable.delete(key)
- return dir
- end
- end
- end
- elsif iterable.is_a? Array
- iterable.each do |item|
- if item.is_a?(Hash) && hash_to_list_of_values(item).include?(value)
- dir += get_dir(value,item)
- return dir
- elsif item.is_a?(Array) && array_to_list_of_values(item).include?(value)
- dir += get_dir(value,item) + "[]"
- return dir
- end
- end
- end
- dir
- end
-
- def hash_to_list_of_values(hash)
- # converts a highly nested hash to a list of all its values
- list = Array.new
- for val in hash.values
- if val.is_a? Hash
- list += hash_to_list_of_values(val)
- elsif val.is_a? Array
- list += array_to_list_of_values(val)
- else
- list << val
- end
- end
- list
- end
-
- def array_to_list_of_values(array)
- #converts a highly nested array to a list of all its values
- ls = Array.new
- for item in array
- if item.is_a? Hash
- ls += hash_to_list_of_values(item)
- elsif item.is_a? Array
- ls += array_to_list_of_values(item)
- else
- ls << item
- end
- end
- ls
- end
-
- def cmr_client
- @cmr_client ||= Cmr::Client.client_for_environment(Rails.configuration.cmr_env, Rails.configuration.services)
- end
-end
From e23bb6a681d40a9ecc2436dfceebfcdfe939d133 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Thu, 16 Jul 2020 15:12:21 -0400
Subject: [PATCH 21/49] MMT-2313: adjusted whitespace
---
app/helpers/loss_report_helper.rb | 2 ++
1 file changed, 2 insertions(+)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index fd946c95d..369c62d8a 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -53,11 +53,13 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
end
end
end
+
if disp == 'text'
return comparison_string
elsif disp == 'json'
return comparison_hash
end
+
end
def hash_map(hash)
From 04ce818957be9be9aa6f65cd4fe5cec143c0d936 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Fri, 17 Jul 2020 08:49:46 -0400
Subject: [PATCH 22/49] MMT-2232/2233 Revision viewing and reversion for tools
(#617)
* MMT-2232 MMT-2233 adding view, revert, and reinstate for tools
* MMT-2232 updating a test that is causing intermittent failures.
* MMT-2232 removing erroneous comment in test
* MMT-2232 fixing response body in altered subscription test
* MMT-2232 flipping feature toggles for UMM-T in UAT and prod
* MMT-2232 Replacing 'Service' with 'Tool' in tests.
* MMT-2232 updating test to clean up correctly
* MMT-2232 VCRing the subscription failed delete test
---
.../change_current_provider.coffee | 6 +
app/controllers/tools_controller.rb | 2 +-
.../_not_current_provider_modal.html.erb | 2 +
app/views/tools/revisions.html.erb | 26 +-
config/environments/production.rb | 2 +-
config/environments/uat.rb | 2 +-
config/locales/en.yml | 4 +
config/routes.rb | 1 +
.../delete_subscriptions_spec.rb | 21 +-
.../services/reverting_service_spec.rb | 2 -
spec/features/tools/reverting_tool_spec.rb | 169 ++++++++
spec/features/tools/revision_list_spec.rb | 58 ++-
spec/vcr/subscriptions/failed_delete.yml | 398 ++++++++++++++++++
13 files changed, 640 insertions(+), 53 deletions(-)
create mode 100644 spec/features/tools/reverting_tool_spec.rb
create mode 100644 spec/vcr/subscriptions/failed_delete.yml
diff --git a/app/assets/javascripts/change_current_provider.coffee b/app/assets/javascripts/change_current_provider.coffee
index 08298d76b..54255c96e 100644
--- a/app/assets/javascripts/change_current_provider.coffee
+++ b/app/assets/javascripts/change_current_provider.coffee
@@ -78,6 +78,12 @@ $(document).ready ->
'Cloning this tool'
when 'delete-tool'
'Deleting this tool'
+ when 'reinstate-tool'
+ action = 'revert'
+ 'Reinstating this tool'
+ when 'revert-tool'
+ action = 'revert'
+ 'Reverting this tool'
$link.data('type', action)
$modal.find('span.provider').text(provider)
diff --git a/app/controllers/tools_controller.rb b/app/controllers/tools_controller.rb
index 65fe35c1e..450fd0a80 100644
--- a/app/controllers/tools_controller.rb
+++ b/app/controllers/tools_controller.rb
@@ -2,7 +2,7 @@
class ToolsController < BasePublishedRecordController
include ManageMetadataHelper
- before_action :set_tool, only: [:show, :edit, :clone, :destroy, :revisions, :download_json] #, :revert]
+ before_action :set_tool, only: [:show, :edit, :clone, :destroy, :revisions, :revert, :download_json]
before_action :set_schema, only: [:show, :edit, :clone, :destroy]
before_action :ensure_supported_version, only: [:show, :edit]
before_action :ensure_correct_provider, only: [:edit, :clone, :destroy]
diff --git a/app/views/shared/_not_current_provider_modal.html.erb b/app/views/shared/_not_current_provider_modal.html.erb
index 6f828700c..5f71b9558 100644
--- a/app/views/shared/_not_current_provider_modal.html.erb
+++ b/app/views/shared/_not_current_provider_modal.html.erb
@@ -14,6 +14,8 @@
<%= link_to 'Yes', revert_variable_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: "not-current-provider-revert-link#{'-' + modal_index.to_s if modal_index}" %>
<% elsif options[:service] %>
<%= link_to 'Yes', revert_service_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: "not-current-provider-revert-link#{'-' + modal_index.to_s if modal_index}" %>
+ <% elsif options[:tool] %>
+ <%= link_to 'Yes', revert_tool_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: "not-current-provider-revert-link#{'-' + modal_index.to_s if modal_index}" %>
<% else %>
<%= link_to 'Yes', revert_collection_path(options[:concept_id], revision_id: options[:revision_id]), class: 'eui-btn--blue spinner is-invisible', id: 'not-current-provider-revert-link' %>
<% end %>
diff --git a/app/views/tools/revisions.html.erb b/app/views/tools/revisions.html.erb
index 9c4f2a277..8a861ac4c 100644
--- a/app/views/tools/revisions.html.erb
+++ b/app/views/tools/revisions.html.erb
@@ -58,12 +58,10 @@
Deleted
<% elsif index == 0 %>
Published
- <%# Uncomment in MMT-2232 %>
- <%#= link_to 'View', tool_path(revision_id: revision_id), title: title %>
+ <%= link_to 'View', tool_path(revision_id: revision_id), title: title %>
<% else %>
Revision
- <%# Uncomment in MMT-2232 %>
- <%#= link_to 'View', tool_path(revision_id: revision_id), title: title %>
+ <%= link_to 'View', tool_path(revision_id: revision_id), title: title %>
<% end %>
@@ -84,21 +82,20 @@
<% end %>
<% unless index == 0 || revision['meta']['deleted'] == true %>
- <%# Uncomment in MMT-2233 %>
- <%# if current_provider?(@provider_id) %>
- <%#= link_to phrase, "#revert-revisions-modal-#{revision_id}", class: 'display-modal' %>
- <%# elsif available_provider?(@provider_id) %>
- <%#= link_to phrase, "#not-current-provider-modal-#{revision_id}", class: 'display-modal not-current-provider', data: { 'provider': @provider_id, record_action: action } %>
- <%# end %>
-
<% end %>
|
diff --git a/config/environments/production.rb b/config/environments/production.rb
index dc5511069..662e251d8 100644
--- a/config/environments/production.rb
+++ b/config/environments/production.rb
@@ -135,7 +135,7 @@
config.subscriptions_enabled = true
# Feature toggle for UMM-T
- config.umm_t_enabled = false
+ config.umm_t_enabled = true
config.cmr_env = 'ops'
config.echo_env = 'ops'
diff --git a/config/environments/uat.rb b/config/environments/uat.rb
index 009094283..43a54ba41 100644
--- a/config/environments/uat.rb
+++ b/config/environments/uat.rb
@@ -115,7 +115,7 @@
config.csplog_enabled = false
# Feature toggle for UMM-T
- config.umm_t_enabled = false
+ config.umm_t_enabled = true
config.cmr_env = 'uat'
config.echo_env = 'uat'
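With both toggles flipped, UMM-T pages are live in UAT and production. Application code reads settings like these back through Rails.configuration; a minimal sketch (the guard shown is hypothetical, not a quote of MMT's actual check):

    # Hypothetical before_action guard illustrating how the toggle would be consulted
    redirect_to manage_collections_path unless Rails.configuration.umm_t_enabled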
diff --git a/config/locales/en.yml b/config/locales/en.yml
index 3ef4ec0cb..3201d3cf6 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -205,6 +205,10 @@ en:
clone:
flash:
notice: 'Records must have a unique Name and Long Name within a provider. Click here to enter a new Name and Long Name.'
+ revert:
+ flash:
+ success: 'Tool Revision Created Successfully!'
+ error: 'Tool revision was not created successfully'
collection_associations:
destroy:
flash:
diff --git a/config/routes.rb b/config/routes.rb
index 734198410..2ae58ed56 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -107,6 +107,7 @@
get '/tools/:id/clone' => 'tools#clone', as: 'clone_tool'
get '/tools/:id/revisions' => 'tools#revisions', as: 'tool_revisions'
get '/tools/:id/download_json(/:revision_id)' => 'tools#download_json', as: 'download_json_tool'
+ get '/tools/:id/revert/:revision_id' => 'tools#revert', as: 'revert_tool'
resources :variable_drafts, controller: 'variable_drafts', draft_type: 'VariableDraft' do
member do
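The new route gives Rails a revert_tool_path helper taking the concept id and revision id, matching the call added to the shared modal above. A sketch with illustrative ids:

    revert_tool_path('TL1200000-MMT_2', revision_id: 2)
    # => "/tools/TL1200000-MMT_2/revert/2"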
diff --git a/spec/features/manage_cmr/subscriptions/delete_subscriptions_spec.rb b/spec/features/manage_cmr/subscriptions/delete_subscriptions_spec.rb
index 8c4f366fd..bb2599082 100644
--- a/spec/features/manage_cmr/subscriptions/delete_subscriptions_spec.rb
+++ b/spec/features/manage_cmr/subscriptions/delete_subscriptions_spec.rb
@@ -53,13 +53,28 @@
context 'when failing to delete a subscription' do
before do
- # Generate an error message by deleting it underneath the 'user'
- cmr_client.delete_subscription('MMT_2', @native_id, 'token')
click_on 'Delete'
- VCR.use_cassette('urs/rarxd5taqea', record: :none) do
+ # Stubbing the delete with 'allow_any_instance_of' would make the cleanup delete in the after block fail as well.
+ # Instead, localhost requests are let through to VCR so the CMR delete response can be mocked as an error.
+ VCR.configure do |c|
+ c.ignore_localhost = false
+ end
+
+ VCR.use_cassette('subscriptions/failed_delete', erb: { concept_id: @ingest_response['concept_id'] }) do
click_on 'Yes'
end
+
+ VCR.configure do |c|
+ c.ignore_localhost = true
+ end
+ end
+
+ # TODO: Remove after CMR-6332
+ after do
+ delete_response = cmr_client.delete_subscription('MMT_2', @native_id, 'token')
+
+ raise unless delete_response.success?
end
it 'fails to delete the record' do
diff --git a/spec/features/services/reverting_service_spec.rb b/spec/features/services/reverting_service_spec.rb
index 5f0510878..d8024aa28 100644
--- a/spec/features/services/reverting_service_spec.rb
+++ b/spec/features/services/reverting_service_spec.rb
@@ -1,5 +1,3 @@
-require 'rails_helper'
-
describe 'Reverting to previous services', reset_provider: true, js: true do
before :all do
# service for simple reverting service test
diff --git a/spec/features/tools/reverting_tool_spec.rb b/spec/features/tools/reverting_tool_spec.rb
new file mode 100644
index 000000000..a2ba6fa09
--- /dev/null
+++ b/spec/features/tools/reverting_tool_spec.rb
@@ -0,0 +1,169 @@
+describe 'Reverting to previous tools', reset_provider: true, js: true do
+ before :all do
+ # tool for simple reverting tool test
+ @simple_revert_ingest_response, @simple_revert_concept_response, @native_id = publish_tool_draft(revision_count: 2)
+
+ # tool for reverting tool with many revisions
+ @multiple_revisions_ingest_response, @multiple_revisions_concept_response, @native_id2 = publish_tool_draft(revision_count: 4, long_name: 'Reverting Tools Test', number_revision_long_names: true)
+ end
+
+ after :all do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id, 'token')
+ delete_response2 = cmr_client.delete_tool('MMT_2', @native_id2, 'token')
+
+ raise unless delete_response.success? && delete_response2.success?
+ end
+
+ before do
+ login
+ end
+
+ context 'when the latest revision is a published tool' do
+ before do
+ visit tool_path(@simple_revert_ingest_response['concept-id'])
+
+ click_on 'Revisions'
+ end
+
+ it 'displays the correct phrasing for reverting records' do
+ expect(page).to have_content('Revert to this Revision', count: 1)
+ end
+
+ context 'when reverting the tool' do
+ before do
+ click_on 'Revert to this Revision'
+ click_on 'Yes'
+
+ wait_for_jQuery
+ wait_for_cmr
+ end
+
+ it 'displays all the correct revision information' do
+ expect(page).to have_content('Revision Created Successfully!')
+
+ expect(page).to have_content('Published', count: 1)
+ expect(page).to have_content('Revision View', count: 2)
+ expect(page).to have_content('Revert to this Revision', count: 2)
+ end
+ end
+
+ context 'when reverting to a revision before the previous revision from a different provider context' do
+ context 'when visiting the revisions page from a different provider' do
+ before do
+ login(provider: 'MMT_1', providers: %w(MMT_1 MMT_2))
+
+ visit tool_revisions_path(@multiple_revisions_ingest_response['concept-id'])
+ end
+
+ it 'displays all the correct revision information' do
+ within 'main header' do
+ expect(page).to have_content('Reverting Tools Test -- revision 04')
+ end
+
+ expect(page).to have_content('Published', count: 1)
+ expect(page).to have_content('Revision View', count: 3)
+ expect(page).to have_content('Revert to this Revision', count: 3)
+ end
+
+ context 'when reverting to the earliest revision' do
+ before do
+ visit tool_revisions_path(@multiple_revisions_ingest_response['concept-id'])
+
+ within '#tool-revisions-table tbody tr:last-child' do
+ # make sure we are clicking on the correct link
+ expect(page).to have_content('1 - Revision')
+
+ click_on 'Revert to this Revision'
+ end
+ end
+
+ it 'displays a modal informing the user they need to switch providers' do
+ expect(page).to have_content('Reverting this tool requires you change your provider context to MMT_2')
+ end
+
+ context 'when clicking Yes' do
+ before do
+ find('.not-current-provider-link').click
+ wait_for_jQuery
+ end
+
+ it 'reverts the tool to the correct revision and displays the correct revision information and switches provider context' do
+ within 'main header' do
+ expect(page).to have_content('Reverting Tools Test -- revision 01')
+ end
+
+ expect(page).to have_content('Published', count: 1)
+ expect(page).to have_content('Revision View', count: 4)
+ expect(page).to have_content('Revert to this Revision', count: 4)
+
+ expect(User.first.provider_id).to eq('MMT_2')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when reverting the tool fails ingestion into CMR' do
+ before do
+ # Do something to the revision so it fails
+ # Add a new field to the metadata, similar to a field name changing
+ # and old metadata still having the old field name
+ new_concept = @simple_revert_concept_response.deep_dup
+ new_concept.body['BadField'] = 'Not going to work'
+
+ allow_any_instance_of(Cmr::CmrClient).to receive(:get_concept).and_return(new_concept)
+
+ click_on 'Revert to this Revision', match: :first
+ click_on 'Yes'
+
+ wait_for_jQuery
+ wait_for_cmr
+ end
+
+ it 'displays an error message' do
+ expect(page).to have_content('extraneous key [BadField] is not permitted')
+ end
+ end
+ end
+
+ context 'when the latest revision is a deleted tool' do
+ before do
+ ingest_response, _concept_response, @native_id3 = publish_tool_draft
+
+ cmr_client.delete_tool('MMT_2', @native_id3, 'token')
+ wait_for_cmr
+
+ visit tool_revisions_path(ingest_response['concept-id'])
+ end
+
+ it 'displays the correct phrasing for reverting records' do
+ expect(page).to have_content('Reinstate', count: 1)
+ end
+
+ context 'when reverting the tool' do
+ before do
+ click_on 'Reinstate'
+ click_on 'Yes'
+
+ wait_for_jQuery
+ wait_for_cmr
+ end
+
+ # TODO: remove after CMR-6332
+ after do
+ delete_response = cmr_client.delete_tool('MMT_2', @native_id3, 'token')
+
+ raise unless delete_response.success?
+ end
+
+ it 'displays all the correct revision information' do
+ expect(page).to have_content('Revision Created Successfully!')
+
+ expect(page).to have_content('Published', count: 1)
+ expect(page).to have_content('Deleted', count: 1)
+ expect(page).to have_content('Revision View', count: 1)
+ expect(page).to have_content('Revert to this Revision', count: 1)
+ end
+ end
+ end
+end
diff --git a/spec/features/tools/revision_list_spec.rb b/spec/features/tools/revision_list_spec.rb
index 0ef91f2a9..aca40ffef 100644
--- a/spec/features/tools/revision_list_spec.rb
+++ b/spec/features/tools/revision_list_spec.rb
@@ -51,36 +51,34 @@
expect(page).to have_content('11 - Published')
end
-# TODO: Uncomment in MMT-2233
-# it 'displays the correct phrasing for reverting records' do
-# expect(page).to have_content('Revert to this Revision', count: 1)
-# end
-
-# TODO: Uncomment in MMT-2232
-# context 'when viewing an old revision' do
-# link_text = 'You are viewing an older revision of this tool. Click here to view the latest published version.'
-# before do
-# all('a', text: 'View').last.click
-# end
-#
-# it 'displays a message that the revision is old' do
-# expect(page).to have_link(link_text)
-# end
-#
-# it 'does not display a link to manage collection associations' do
-# expect(page).to have_no_link('Manage Collection Associations')
-# end
-#
-# context 'when clicking the message' do
-# before do
-# click_on link_text
-# end
-#
-# it 'displays the latest revision to the user' do
-# expect(page).to have_no_link(link_text)
-# end
-# end
-# end
+ it 'displays the correct phrasing for reverting records' do
+ expect(page).to have_content('Revert to this Revision', count: 9)
+ end
+
+ context 'when viewing an old revision' do
+ link_text = 'You are viewing an older revision of this tool. Click here to view the latest published version.'
+ before do
+ all('a', text: 'View').last.click
+ end
+
+ it 'displays a message that the revision is old' do
+ expect(page).to have_link(link_text)
+ end
+
+ it 'does not display a link to manage collection associations' do
+ expect(page).to have_no_link('Manage Collection Associations')
+ end
+
+ context 'when clicking the message' do
+ before do
+ click_on link_text
+ end
+
+ it 'displays the latest revision to the user' do
+ expect(page).to have_no_link(link_text)
+ end
+ end
+ end
end
context 'when searching for the tool' do
diff --git a/spec/vcr/subscriptions/failed_delete.yml b/spec/vcr/subscriptions/failed_delete.yml
new file mode 100644
index 000000000..ba6eb1f5f
--- /dev/null
+++ b/spec/vcr/subscriptions/failed_delete.yml
@@ -0,0 +1,398 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://sit.urs.earthdata.nasa.gov/api/users?uids%5B%5D=rarxd5taqea
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v0.8.11
+ Authorization:
+ - Bearer access_token
+ response:
+ status:
+ code: 200
+ message:
+ headers:
+ server:
+ - nginx/1.10.2
+ date:
+ - Tue, 02 May 2017 18:45:16 GMT
+ content-type:
+ - application/json; charset=utf-8
+ transfer-encoding:
+ - chunked
+ connection:
+ - close
+ x-frame-options:
+ - SAMEORIGIN
+ x-xss-protection:
+ - 1; mode=block
+ x-content-type-options:
+ - nosniff
+ etag:
+ - W/"a0ea6aa58a4b79e873d6cb66d9d5bfb7"
+ cache-control:
+ - max-age=0, private, must-revalidate
+ x-request-id:
+ - 37c1c4e1-f4a7-4af5-a94b-0b51d798342b
+ x-runtime:
+ - '0.024303'
+ strict-transport-security:
+ - max-age=31536000
+ body:
+ encoding: UTF-8
+ string: '{"users":[{"uid":"rarxd5taqea","first_name":"Rvrhzxhtra","last_name":"Vetxvbpmxf","email_address":"uozydogeyyyujukey@tjbh.eyyy","country":"Macedonia","study_area":null,"registered_date":"2012-08-29T11:02:42.000Z","allow_auth_app_emails":true}]}'
+ http_version:
+ recorded_at: Tue, 02 May 2017 18:45:16 GMT
+- request:
+ method: delete
+ uri: http://localhost:3002/providers/MMT_2/subscriptions/test_native_id
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Content-Type:
+ - application/vnd.nasa.cmr.umm+json
+ Client-Id:
+ - MMT
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 500
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:19:54 GMT
+ content-type:
+ - application/xml
+ cmr-request-id:
+ - 88dd6d01-aa41-46fc-828d-705d7dec1bbb
+ x-request-id:
+ - 88dd6d01-aa41-46fc-828d-705d7dec1bbb
+ content-length:
+ - '148'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"errors" => ["Concept with native-id [test_native_id] and concept-id
+ [<%= concept_id %>] is already deleted."] }'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:19:54 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?provider=MMT_2&target=NON_NASA_DRAFT_APPROVER&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 866be3e9-3242-45b5-9d0c-bd8df58c0ccf
+ x-request-id:
+ - 866be3e9-3242-45b5-9d0c-bd8df58c0ccf
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '30'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"NON_NASA_DRAFT_APPROVER":[]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3003/subscriptions.umm_json?concept_id=<%= concept_id %>
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ content-type:
+ - application/vnd.nasa.cmr.umm_results+json;version=1.0; charset=utf-8
+ access-control-expose-headers:
+ - CMR-Hits, CMR-Request-Id, X-Request-Id, CMR-Scroll-Id, CMR-Timed-Out, CMR-Shapefile-Original-Point-Count,
+ CMR-Shapefile-Simplified-Point-Count
+ access-control-allow-origin:
+ - "*"
+ cmr-hits:
+ - '1'
+ cmr-took:
+ - '10'
+ cmr-request-id:
+ - ea5ee97c-0b1f-493d-87da-48a3816d4030
+ x-request-id:
+ - ea5ee97c-0b1f-493d-87da-48a3816d4030
+ content-length:
+ - '553'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"hits":1,"took":10,"items":[{"meta":{"revision-id":14,"deleted":false,"format":"application/vnd.nasa.cmr.umm+json","provider-id":"MMT_2","user-id":"typical","native-id":"test_native_id","concept-id":"<%= concept_id %>","revision-date":"2020-07-16T17:30:48Z","concept-type":"subscription"},"umm":{"Name":"Test_Subscription_38f5c2b5-4192-4866-bcd4-413452e27b65","CollectionConceptId":"C520536-TEST","Query":"bounding_box=-10,-5,10,5&attribute\\[\\]=float,PERCENTAGE,25.5,30","SubscriberId":"rarxd5taqea","EmailAddress":"uozydogeyyyujukey@tjbh.eyyy"}}]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?provider=MMT_2&target=SUBSCRIPTION_MANAGEMENT&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 0daa280f-6586-46b6-b186-bffcb8409009
+ x-request-id:
+ - 0daa280f-6586-46b6-b186-bffcb8409009
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '45'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"SUBSCRIPTION_MANAGEMENT":["read","update"]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?provider=MMT_2&target=NON_NASA_DRAFT_APPROVER&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 380e3f0f-b47d-4b32-9317-f28a2141f249
+ x-request-id:
+ - 380e3f0f-b47d-4b32-9317-f28a2141f249
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '30'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"NON_NASA_DRAFT_APPROVER":[]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?system_object=ANY_ACL&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 2e2d7e7c-57f1-41e1-b8c6-7daedce0a6da
+ x-request-id:
+ - 2e2d7e7c-57f1-41e1-b8c6-7daedce0a6da
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '20'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"ANY_ACL":["read"]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?provider=MMT_2&target=PROVIDER_OBJECT_ACL&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 24edd338-d818-4bad-9c2f-26aa216b5d48
+ x-request-id:
+ - 24edd338-d818-4bad-9c2f-26aa216b5d48
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '59'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"PROVIDER_OBJECT_ACL":["read","create","update","delete"]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3003/subscriptions.umm_json?concept_id=<%= concept_id %>
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ content-type:
+ - application/vnd.nasa.cmr.umm_results+json;version=1.0; charset=utf-8
+ access-control-expose-headers:
+ - CMR-Hits, CMR-Request-Id, X-Request-Id, CMR-Scroll-Id, CMR-Timed-Out, CMR-Shapefile-Original-Point-Count,
+ CMR-Shapefile-Simplified-Point-Count
+ access-control-allow-origin:
+ - "*"
+ cmr-hits:
+ - '1'
+ cmr-took:
+ - '11'
+ cmr-request-id:
+ - f64a50a3-8a1d-4bfc-be6e-23ed95ce6d73
+ x-request-id:
+ - f64a50a3-8a1d-4bfc-be6e-23ed95ce6d73
+ content-length:
+ - '553'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"hits":1,"took":10,"items":[{"meta":{"revision-id":14,"deleted":false,"format":"application/vnd.nasa.cmr.umm+json","provider-id":"MMT_2","user-id":"typical","native-id":"test_native_id","concept-id":"<%= concept_id %>","revision-date":"2020-07-16T17:30:48Z","concept-type":"subscription"},"umm":{"Name":"Test_Subscription_38f5c2b5-4192-4866-bcd4-413452e27b65","CollectionConceptId":"C520536-TEST","Query":"bounding_box=-10,-5,10,5&attribute\\[\\]=float,PERCENTAGE,25.5,30","SubscriberId":"rarxd5taqea","EmailAddress":"uozydogeyyyujukey@tjbh.eyyy"}}]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+- request:
+ method: get
+ uri: http://localhost:3011/permissions?provider=MMT_2&target=SUBSCRIPTION_MANAGEMENT&user_id=testuser
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Faraday v1.0.1
+ Client-Id:
+ - MMT
+ Accept:
+ - application/json; charset=utf-8
+ Echo-Token:
+ - ABC-2
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ date:
+ - Thu, 16 Jul 2020 17:30:49 GMT
+ cmr-request-id:
+ - 9faa6495-6144-42e8-9624-beed6443069a
+ x-request-id:
+ - 9faa6495-6144-42e8-9624-beed6443069a
+ vary:
+ - Accept-Encoding, User-Agent
+ content-length:
+ - '45'
+ server:
+ - Jetty(9.4.z-SNAPSHOT)
+ body:
+ encoding: UTF-8
+ string: '{"SUBSCRIPTION_MANAGEMENT":["read","update"]}'
+ http_version:
+ recorded_at: Thu, 16 Jul 2020 17:30:49 GMT
+recorded_with: VCR 5.1.0
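The <%= concept_id %> placeholders in this cassette are filled in at replay time through VCR's erb option, which the spec above passes as a hash; a minimal sketch (the concept id value is illustrative):

    VCR.use_cassette('subscriptions/failed_delete', erb: { concept_id: 'SUB1200000-MMT_2' }) do
      click_on 'Yes' # the CMR delete now replays the recorded 500 error
    end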
From 50c0c0f46e497d2f37b4971cf43b0b68c31c3c75 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 17 Jul 2020 09:05:19 -0400
Subject: [PATCH 23/49] MMT-2313: added some comments
---
app/controllers/collections_controller.rb | 3 ++-
app/helpers/loss_report_helper.rb | 8 ++++----
2 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 5cc0fb3fd..b613d674f 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -121,8 +121,9 @@ def loss_report
# it's important that they're able to see if any data loss occurs in the translation to umm.
# This method is needed to reference the appropriate helper and view for the lossiness report
concept_id = params[:id]
+ ft = params[:format]
respond_to do |format|
- format.any {render plain: loss_report_output(concept_id, hide_items=false, disp='text') }
+ format.text {render plain: loss_report_output(concept_id, hide_items=true, disp='text') }
format.json { render json: JSON.pretty_generate(loss_report_output(concept_id, hide_items=false, disp='json')) }
end
end
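After this change the text format returns a path-only listing (hide_items=true) while JSON keeps the lost values (hide_items=false) and pretty-prints them. For reference, JSON.pretty_generate simply renders the comparison hash with indentation (the key and value below are illustrative):

    require 'json'

    puts JSON.pretty_generate('1. -: /Collection/Contacts/Contact[0]' => { 'Role' => 'ARCHIVER' })
    # {
    #   "1. -: /Collection/Contacts/Contact[0]": {
    #     "Role": "ARCHIVER"
    #   }
    # }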
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 369c62d8a..cd24898b7 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -5,14 +5,14 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
orig = Nokogiri::XML(orig) { |config| config.strict.noblanks }
conv = Nokogiri::XML(conv) { |config| config.strict.noblanks }
- ignored_paths = Array.new
+ ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
comparison_string = String.new if disp == 'text'
comparison_hash = Hash.new if disp == 'json'
counter = 1
- orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+ orig.diff(conv, {:added => true, :removed => true}) do |change,node|
split_path = node.parent.path.split('[')
- if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0])
+ if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0]) #
ignored_paths << split_path[0]
array_comparison(split_path[0], orig_h, conv_h).each do |item|
if disp == 'text'
@@ -59,7 +59,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
elsif disp == 'json'
return comparison_hash
end
-
+
end
def hash_map(hash)
From 748631e6bfbf07ad1398185d8594926f59a675fd Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 20 Jul 2020 09:07:14 -0400
Subject: [PATCH 24/49] MMT-2313: increased readability of loss_report_output
---
app/controllers/collections_controller.rb | 1 -
app/helpers/loss_report_helper.rb | 152 ++++++++++------------
2 files changed, 69 insertions(+), 84 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index b613d674f..e148bed83 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -121,7 +121,6 @@ def loss_report
# it's important that they're able to see if any data loss occurs in the translation to umm.
# This method is needed to reference the appropriate helper and view for the lossiness report
concept_id = params[:id]
- ft = params[:format]
respond_to do |format|
format.text {render plain: loss_report_output(concept_id, hide_items=true, disp='text') }
format.json { render json: JSON.pretty_generate(loss_report_output(concept_id, hide_items=false, disp='json')) }
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index cd24898b7..4fc92a00b 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,65 +1,53 @@
module LossReportHelper
def loss_report_output(concept_id, hide_items=true, disp='text')
- orig,conv,orig_h,conv_h = prepare_collections(concept_id, 'echo10', '1.15.3')
+ # depending on the input selection (json or text), a comparison string/hash is created and displayed in-browser
+ orig,conv,orig_h,conv_h,ct = prepare_collections(concept_id, '1.15.3')
+
orig = Nokogiri::XML(orig) { |config| config.strict.noblanks }
conv = Nokogiri::XML(conv) { |config| config.strict.noblanks }
ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
comparison_string = String.new if disp == 'text'
comparison_hash = Hash.new if disp == 'json'
+ comparison_string += (ct + "\n\n") if disp == 'text'
counter = 1
- orig.diff(conv, {:added => true, :removed => true}) do |change,node|
- split_path = node.parent.path.split('[')
- if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0]) #
- ignored_paths << split_path[0]
- array_comparison(split_path[0], orig_h, conv_h).each do |item|
- if disp == 'text'
- comparison_string += "#{counter}. #{item[0]}: #{item[1]}".ljust(60) + item[2] + "\n" if hide_items == false
- comparison_string += "#{counter}. #{item[0]}: ".ljust(2) + item[2] + "\n" if hide_items == true
- elsif disp == 'json'
- comparison_hash["#{counter}. #{item[0]}: #{item[2]}"] = item[1]
- end
+ orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+ element = node.to_xml
+ path = node.parent.path
+ split_path = path.split('[')[0]
+
+ if path.include?('[') && !ignored_paths.include?(split_path)
+ ignored_paths << split_path
+ array_comparison(split_path, orig_h, conv_h).each do |item|
+ add_to_report(counter, item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
counter += 1
end
- elsif !ignored_paths.include?(split_path[0]) && !path_leads_to_list?(node.parent.path, orig_h, conv_h)
- element = node.to_xml
- path = node.parent.path
- if disp == 'text'
- if element.include?('<' && '' && '>')
- element = Hash.from_xml(element)
- hash_map(element).each do |item|
- comparison_string += "#{counter}. #{change}: #{item['value']}".ljust(60) + path + '/' + item['path'] + "\n" if hide_items == false
- comparison_string += "#{counter}. #{change}: ".ljust(2) + path + '/' + item['path'] + "\n" if hide_items == true
- counter += 1
- end
- else
- comparison_string += "#{counter}. #{change}: #{element}".ljust(60) + path + "\n" if hide_items == false
- comparison_string += "#{counter}. #{change}: ".ljust(2) + path + "\n" if hide_items == true
- counter += 1
- end
- elsif disp == 'json'
- if element.include?('<' && '' && '>')
- element = Hash.from_xml(element)
- hash_map(element).each do |item|
- comparison_hash["#{counter}. #{change}: #{path + '/' + item['path']}"] = item['value']
- counter += 1
- end
- else
- comparison_hash["#{counter}. #{change}: #{path}"] = element
+ elsif !ignored_paths.include?(split_path) && !path_leads_to_list?(path, orig_h, conv_h)
+ if is_xml? node
+ element = Hash.from_xml(element)
+ hash_map(element).each do |item|
+ add_to_report(counter, change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
counter += 1
end
+ else
+ add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
end
end
- end
- if disp == 'text'
- return comparison_string
- elsif disp == 'json'
- return comparison_hash
end
+ if disp == 'text' then return comparison_string
+ elsif disp == 'json' then return comparison_hash end
+ end
+ def add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ # this function serves to preclude complex nests from forming in loss_report_output; the
+ # following 'if' structure is intended to increase readability by eliminating nests
+ return comparison_string.concat("#{counter}. #{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
+ return comparison_string.concat("#{counter}. #{change}: ".ljust(2) + path + "\n") if hide_items == true && disp == 'text'
+ return comparison_hash["#{counter}. #{change}: #{path}"] = element if disp == 'json'
end
def hash_map(hash)
@@ -77,36 +65,37 @@ def hash_map(hash)
buckets
end
- def prepare_collections(concept_id, format, umm_c_version)
+ def is_xml?(node)
+ if node.to_xml.include?('<' && '' && '>') then return true
+ else return false end
+ end
+
+ def prepare_collections(concept_id, umm_c_version)
# TODO: need to add exception handling for get_concept, translate_collection
original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
- # concept ID and format can be scalped from headers etc
+ content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
- translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, "application/#{format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
- translated_collection_native_xml = cmr_client.translate_collection(translated_collection_umm_json.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", "application/#{format}+xml", skip_validation=true)
+ translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, content_type, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
+ translated_collection_native_xml = cmr_client.translate_collection(translated_collection_umm_json.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, skip_validation=true)
translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
- # File.write('/Users/ctrummer/Documents/devtesting/o_'+concept_id+'.json', JSON.pretty_generate(original_collection_native_hash))
- # File.write('/Users/ctrummer/Documents/devtesting/c_'+concept_id+'.json', JSON.pretty_generate(translated_collection_native_hash))
- # File.write('/Users/ctrummer/Documents/devtesting/o_'+concept_id+'.xml', original_collection_native_xml.body)
- # File.write('/Users/ctrummer/Documents/devtesting/c_'+concept_id+'.xml', translated_collection_native_xml.body)
- return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash
+ return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
def path_leads_to_list?(path, org_hash, conv_hash)
# this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
- org_hash_path = hash_navigation(path, org_hash)
- conv_hash_path = hash_navigation(path, conv_hash)
+ org_hash = hash_navigation(path, org_hash)
+ conv_hash = hash_navigation(path, conv_hash)
if path.include?("[") && path.include?("]")
bool = true
- elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
+ elsif org_hash.is_a?(Hash) && conv_hash.is_a?(Hash)
# the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
# <Contacts><Contact>contact</Contact></Contacts> and so all array-containing tags will be the plural
- # of the array name. This clause serves to idenitfy array-containing tags when their paths aren't properly
+ # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
# displayed by nokogiri
- bool = true if org_hash_path.keys.length == 1 && org_hash_path[org_hash_path.keys[0]].is_a?(Array)
- bool = true if conv_hash_path.keys.length == 1 && conv_hash_path[conv_hash_path.keys[0]].is_a?(Array)
- elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
+ bool = true if org_hash.keys.length == 1 && org_hash[org_hash.keys[0]].is_a?(Array)
+ bool = true if conv_hash.keys.length == 1 && conv_hash[conv_hash.keys[0]].is_a?(Array)
+ elsif org_hash.is_a?(Array) || conv_hash.is_a?(Array)
bool = true
else
bool = false
@@ -114,15 +103,14 @@ def path_leads_to_list?(path, org_hash, conv_hash)
bool
end
- def hash_navigation(dir, hash)
+ def hash_navigation(path, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
- # returns the hash at the end of the path
- dir = dir.split '/'
- dir.each do |key|
- if !key.empty? && hash.is_a?(Hash)
- hash = hash[key]
- elsif hash.is_a? Array
+ # returns the hash/value at the end of the path
+ path.split('/').each do |key|
+ if hash.is_a? Array
return hash
+ elsif hash.key?(key) && hash.is_a?(Hash)
+ hash = hash[key]
end
end
hash
@@ -130,40 +118,38 @@ def hash_navigation(dir, hash)
def array_comparison(path, original_hash, converted_hash)
- org_array = hash_navigation(path, original_hash)
- conv_array = hash_navigation(path, converted_hash)
+ pretranslation_array = hash_navigation(path, original_hash)
+ post_translation_array = hash_navigation(path, converted_hash)
- org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
- org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
- conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
+ pretranslation_array.is_a?(Array) ? lost_items_arr = pretranslation_array.clone : lost_items_arr = Array.wrap(pretranslation_array)
+ pretranslation_array = Array.wrap(pretranslation_array) unless pretranslation_array.is_a?(Array)
+ post_translation_array.is_a?(Array) ? added_itmes_arr = post_translation_array.clone : added_itmes_arr = Array.wrap(post_translation_array)
+ post_translation_array = Array.wrap(post_translation_array) unless post_translation_array.is_a?(Array)
# org_arr and conv_arr are copies of org_array and conv_array, respectively.
# The *_arr values are edited during the comparison between the org_array and conv_array arrays
# and so the *_array arrays are used to maintained a full version of each array for indexing the items in the following lines.
- for conv_item in conv_array
- for org_item in org_array
+ for conv_item in post_translation_array
+ for org_item in pretranslation_array
if org_item.eql? conv_item
- org_arr.delete(org_item)
- conv_arr.delete(conv_item)
+ lost_items_arr.delete(org_item)
+ added_itmes_arr.delete(conv_item)
break
end
end
end
output = Array.new
- org_arr.each do |item|
- path_with_index = path + "[#{org_array.index(item)}]"
- loss_item = ['-', item, path_with_index]
- output << loss_item
+ lost_items_arr.each do |item|
+ path_with_index = path + "[#{pretranslation_array.index(item)}]"
+ output << ['-', item, path_with_index]
end
- conv_arr.each do |item|
- path_with_index = path + "[#{conv_array.index(item)}]"
- loss_item = ['+', item, path_with_index]
- output << loss_item
+ added_itmes_arr.each do |item|
+ path_with_index = path + "[#{post_translation_array.index(item)}]"
+ output << ['+', item, path_with_index]
end
output
end
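The hash_navigation helper introduced above walks a '/'-separated Nokogiri path into the Hash.from_xml output and stops at the first Array it reaches. A standalone sketch of the same traversal against a hypothetical collection hash (the guard order is flipped so the Hash check runs before key?):

  def navigate(path, hash)
    path.split('/').each do |key|
      if hash.is_a?(Array)
        return hash                              # stop at the first array on the path
      elsif hash.is_a?(Hash) && hash.key?(key)
        hash = hash[key]                         # descend one level; '' segments are skipped
      end
    end
    hash
  end

  collection = { 'Collection' => { 'Contacts' => { 'Contact' => [{ 'Role' => 'TECHNICAL' }] } } }
  navigate('/Collection/Contacts/Contact', collection)      # => [{"Role"=>"TECHNICAL"}]
  navigate('/Collection/Contacts/Contact/Role', collection) # => the same array; traversal stops there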
From d6d5077bcc8ae54f8af119139c89081678ffec0e Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 20 Jul 2020 11:00:27 -0400
Subject: [PATCH 25/49] MMT-2313: improved readability
---
app/helpers/loss_report_helper.rb | 31 +++++++++++++++++--------------
1 file changed, 17 insertions(+), 14 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 4fc92a00b..31561b749 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -10,7 +10,8 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
comparison_string = String.new if disp == 'text'
comparison_hash = Hash.new if disp == 'json'
- comparison_string += (ct + "\n\n")
+ comparison_hash['format'] = ct
+ comparison_string += (ct + "\n\n") if disp == 'text'
counter = 1
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
@@ -25,7 +26,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
counter += 1
end
elsif !ignored_paths.include?(split_path) && !path_leads_to_list?(path, orig_h, conv_h)
- if is_xml? node
+ if is_xml?(node)
element = Hash.from_xml(element)
hash_map(element).each do |item|
add_to_report(counter, change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
@@ -105,9 +106,9 @@ def path_leads_to_list?(path, org_hash, conv_hash)
def hash_navigation(path, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
- # returns the hash/value at the end of the path
+ # returns the array/value at the end of the path
path.split('/').each do |key|
- if hash.is_a? Array
+ if hash.is_a?(Array)
return hash
elsif hash.key?(key) && hash.is_a?(Hash)
hash = hash[key]
@@ -118,21 +119,23 @@ def hash_navigation(path, hash)
def array_comparison(path, original_hash, converted_hash)
- pretranslation_array = hash_navigation(path, original_hash)
+ pre_translation_array = hash_navigation(path, original_hash)
post_translation_array = hash_navigation(path, converted_hash)
- pretranslation_array.is_a?(Array) ? lost_items_arr = pretranslation_array.clone : lost_items_arr = Array.wrap(pretranslation_array)
- pretranslation_array = Array.wrap(pretranslation_array) unless pretranslation_array.is_a?(Array)
+ # in the case that a one-item array is parsed as a regular key-value pair instead of an array, an Array wrapper is placed around key-val pair
+ # so that the following for loops can be executed without error
+ pre_translation_array.is_a?(Array) ? lost_items_arr = pre_translation_array.clone : lost_items_arr = Array.wrap(pre_translation_array)
+ pre_translation_array = Array.wrap(pre_translation_array)
post_translation_array.is_a?(Array) ? added_itmes_arr = post_translation_array.clone : added_itmes_arr = Array.wrap(post_translation_array)
- post_translation_array = Array.wrap(post_translation_array) unless post_translation_array.is_a?(Array)
+ post_translation_array = Array.wrap(post_translation_array)
- # org_arr and conv_arr are copies of org_array and conv_array, respectively.
- # The *_arr values are edited during the comparison between the org_array and conv_array arrays
- # and so the *_array arrays are used to maintained a full version of each array for indexing the items in the following lines.
+ # as defined above, the lost_items_arr and added_itmes_arr are copies of pre_translation_array and post_translation_array, respectively.
+ # The *_arr values are edited during the comparison between the pre_translation_array and post_translation_array arrays
+ # and so the *_array arrays are used to maintain a full version of each array for indexing the items in the following lines.
for conv_item in post_translation_array
- for org_item in pretranslation_array
- if org_item.eql? conv_item
+ for org_item in pre_translation_array
+ if org_item == conv_item
lost_items_arr.delete(org_item)
added_itmes_arr.delete(conv_item)
break
@@ -142,7 +145,7 @@ def array_comparison(path, original_hash, converted_hash)
output = Array.new
lost_items_arr.each do |item|
- path_with_index = path + "[#{pretranslation_array.index(item)}]"
+ path_with_index = path + "[#{pre_translation_array.index(item)}]"
output << ['-', item, path_with_index]
end
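The Array.wrap guards above exist because Hash.from_xml collapses a one-element list into a plain hash, so the pre- and post-translation values can arrive in different shapes. A short illustration with hypothetical XML (Array.wrap and Hash.from_xml are ActiveSupport extensions, already loaded in Rails):

  one  = Hash.from_xml('<Contacts><Contact><Role>A</Role></Contact></Contacts>')
  many = Hash.from_xml('<Contacts><Contact><Role>A</Role></Contact><Contact><Role>B</Role></Contact></Contacts>')

  one['Contacts']['Contact']             # => {"Role"=>"A"}  -- a Hash, not a one-item Array
  many['Contacts']['Contact']            # => [{"Role"=>"A"}, {"Role"=>"B"}]
  Array.wrap(one['Contacts']['Contact']) # => [{"Role"=>"A"}] -- uniform shape for the comparison loops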
From 52d585901fff7fa58c0a4d01d9f17928a6c81f45 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Tue, 21 Jul 2020 10:26:29 -0400
Subject: [PATCH 26/49] Hotfix/MMT-2232-1 Fixing tests bamboo started failing
(#619)
* MMT-2232 fixing some tests that bamboo started failing.
* MMT-2232 adding comment to add_data_center_with_retry
---
.../forms/data_centers_form_spec.rb | 2 +-
.../saving_data_centers_data_contacts_spec.rb | 2 +-
.../collection_drafts/forms/validation_spec.rb | 6 ++++++
spec/support/draft_helpers.rb | 15 +++++++++++++++
4 files changed, 23 insertions(+), 2 deletions(-)
diff --git a/spec/features/collection_drafts/forms/data_centers_form_spec.rb b/spec/features/collection_drafts/forms/data_centers_form_spec.rb
index 1d41ce3a5..00082826e 100644
--- a/spec/features/collection_drafts/forms/data_centers_form_spec.rb
+++ b/spec/features/collection_drafts/forms/data_centers_form_spec.rb
@@ -27,7 +27,7 @@
within '#draft_data_centers_1' do
select 'Originator', from: 'Role'
- add_data_center('ESA/ED')
+ add_data_center_with_retry('ESA/ED')
add_contact_information(type: 'data_center', single: false, button_type: 'Data Center')
end
diff --git a/spec/features/collection_drafts/forms/saving_data_centers_data_contacts_spec.rb b/spec/features/collection_drafts/forms/saving_data_centers_data_contacts_spec.rb
index 6a31955e2..5fa7a27f0 100644
--- a/spec/features/collection_drafts/forms/saving_data_centers_data_contacts_spec.rb
+++ b/spec/features/collection_drafts/forms/saving_data_centers_data_contacts_spec.rb
@@ -66,7 +66,7 @@
within '.multiple.data-centers > .multiple-item-1' do
select 'Originator', from: 'Role'
- add_data_center('ESA/ED')
+ add_data_center_with_retry('ESA/ED')
end
within '.nav-top' do
diff --git a/spec/features/collection_drafts/forms/validation_spec.rb b/spec/features/collection_drafts/forms/validation_spec.rb
index ac0c80194..800af7f4f 100644
--- a/spec/features/collection_drafts/forms/validation_spec.rb
+++ b/spec/features/collection_drafts/forms/validation_spec.rb
@@ -290,6 +290,12 @@
fill_in 'draft_temporal_extents_0_single_date_times_0', with: '2015-07-01T00:00:00Z'
end
+ # Bamboo spontaneously started failing this test with the apparent
+ # cause being that 'done' was not being clicked. Clicking something
+ # outside of the datepicker widget allows the done click to be
+ # processed correctly. Previously, it looks like the click for done
+ # was only exiting the single date time field.
+ find('#draft_temporal_extents_0_precision_of_seconds').click
within '.nav-top' do
click_on 'Done'
end
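The same workaround generalizes to any sticky picker widget: click a neutral field to move focus out of the widget, then perform the real click. A minimal Capybara sketch (the helper name is hypothetical; the selectors are the ones used in the spec above):

  def click_done_past_datepicker
    find('#draft_temporal_extents_0_precision_of_seconds').click # focus leaves the datepicker
    within '.nav-top' do
      click_on 'Done' # now registers as a click rather than merely exiting the date field
    end
  end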
diff --git a/spec/support/draft_helpers.rb b/spec/support/draft_helpers.rb
index bdf313501..41f3d8d05 100644
--- a/spec/support/draft_helpers.rb
+++ b/spec/support/draft_helpers.rb
@@ -48,6 +48,21 @@ def add_data_center(value)
end
end
+ # Bamboo started failing some tests where it seemed that the select2 was not
+ # opening properly. This is notably slower, so it should only be used when
+ # necessary.
+ def add_data_center_with_retry(value)
+ ActiveSupport::Notifications.instrument 'mmt.performance', activity: 'Helpers::DraftHelpers#add_data_center_with_retry' do
+ find('.select2-container .select2-selection').click
+ begin
+ find(:xpath, '//body').find('.select2-dropdown li.select2-results__option', text: value)
+ rescue Capybara::ElementNotFound
+ find('.select2-container .select2-selection').click
+ end
+ find(:xpath, '//body').find('.select2-dropdown li.select2-results__option', text: value).click
+ end
+ end
+
def add_person
ActiveSupport::Notifications.instrument 'mmt.performance', activity: 'Helpers::DraftHelpers#add_person' do
fill_in 'First Name', with: 'First Name'
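The retry above can also be written as a generic wrapper for any trigger/expectation pair, at the cost of one redundant find on the happy path. A sketch under the same assumptions (the method name is hypothetical; the selectors are those of the helper above):

  def click_with_retry(trigger, option_text)
    find(trigger).click
    begin
      find('.select2-dropdown li.select2-results__option', text: option_text)
    rescue Capybara::ElementNotFound
      find(trigger).click # first click was swallowed; open the dropdown again
    end
    find('.select2-dropdown li.select2-results__option', text: option_text).click
  end

  # e.g. click_with_retry('.select2-container .select2-selection', 'ESA/ED')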
From db730a000cd2c2d18d5997ce499d1881a7541930 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 21 Jul 2020 15:22:14 -0400
Subject: [PATCH 27/49] MMT-2313: attempting to fix asterisk problem
---
app/helpers/loss_report_helper.rb | 89 ++++++++++++++++++++++---------
1 file changed, 63 insertions(+), 26 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 31561b749..5d44853f0 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -2,16 +2,37 @@ module LossReportHelper
def loss_report_output(concept_id, hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
- orig,conv,orig_h,conv_h,ct = prepare_collections(concept_id, '1.15.3')
+ orig_xml,conv_xml,orig_h,conv_h,content_type = prepare_collections(concept_id, '1.15.3')
+
+ # if content_type.include?('dif10')
+ # orig = Nokogiri::XML(orig_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true})) { |config| config.strict.noblanks } .search('DIF').first.dup
+ # conv = Nokogiri::XML(conv_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true})) { |config| config.strict.noblanks } .search('DIF').first.dup
+ # else
+ # orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
+ # conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
+ # end
+ # orig_xml = orig_xml.split('')[-1]
+ # conv_xml = conv_xml.split('')[-1]
+
+ orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
+ conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
+
- orig = Nokogiri::XML(orig) { |config| config.strict.noblanks }
- conv = Nokogiri::XML(conv) { |config| config.strict.noblanks }
ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
comparison_string = String.new if disp == 'text'
comparison_hash = Hash.new if disp == 'json'
- comparison_hash['format'] = ct
- comparison_string += (ct + "\n\n") if disp == 'text'
+
+ # comparison_hash['orig'] = hash_map(orig_h) if disp == 'json'
+ # comparison_hash['orig'] = orig_h if disp == 'json'
+ # comparison_hash['conv'] = conv_h if disp == 'json'
+ comparison_string += orig_xml +"\n\n\n\n\n\n" + orig_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true}) if disp == 'text'
+
+ comparison_hash['format'] = content_type if disp == 'json'
+ comparison_string += (content_type + "\n\n") if disp == 'text'
+
+ # p = '/DIF/Related_RL/Related_UR'
+ # comparison_string += (p+': path_leads_to_list? => ' + path_leads_to_list?(p, orig_h, conv_h).to_s + "\n")
counter = 1
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
@@ -19,25 +40,32 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
path = node.parent.path
split_path = path.split('[')[0]
- if path.include?('[') && !ignored_paths.include?(split_path)
- ignored_paths << split_path
- array_comparison(split_path, orig_h, conv_h).each do |item|
- add_to_report(counter, item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
- end
- elsif !ignored_paths.include?(split_path) && !path_leads_to_list?(path, orig_h, conv_h)
- if is_xml?(node)
- element = Hash.from_xml(element)
- hash_map(element).each do |item|
- add_to_report(counter, change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
- end
- else
- add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
- counter += 1
- end
- end
-
+ # comparison_string += (path + "\n") if disp == 'text'
+
+ # if path_leads_to_list?(path, orig_h, conv_h) && !ignored_paths.include?(split_path)
+ # ignored_paths << split_path
+ # array_comparison(split_path, orig_h, conv_h).each do |item|
+ # add_to_report(counter, 'c'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
+ # counter += 1
+ # end
+ # # path += ' identified as a list'
+ # elsif !ignored_paths.include?(split_path)
+ # if is_xml?(node) #Possibly use the nokogiri #xml? method
+ # # path += ' needs hash mapping'
+ # element = Hash.from_xml(element)
+ # hash_map(element).each do |item|
+ # add_to_report(counter, 'ct'+change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
+ # counter += 1
+ # end
+ # else
+ # # path += ' pure nokogiri'
+ # add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ # counter += 1
+ # end
+ # end
+
+ add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
end
if disp == 'text' then return comparison_string
elsif disp == 'json' then return comparison_hash end
@@ -46,11 +74,17 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
def add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
# this function serves to preclude complex nests from forming in loss_report_output; the
# following 'if' structure is intended to increase readability by eliminating nests
- return comparison_string.concat("#{counter}. #{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
- return comparison_string.concat("#{counter}. #{change}: ".ljust(2) + path + "\n") if hide_items == true && disp == 'text'
+ return comparison_string.concat("#{counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
+ return comparison_string.concat("#{counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
return comparison_hash["#{counter}. #{change}: #{path}"] = element if disp == 'json'
end
+ def change_path(path)
+ arr = path.split('/*')
+ arr[0] = '/DIF'
+ arr.join
+ end
+
def hash_map(hash)
buckets = Array.new
hash.each do |key,val|
@@ -87,6 +121,8 @@ def path_leads_to_list?(path, org_hash, conv_hash)
org_hash = hash_navigation(path, org_hash)
conv_hash = hash_navigation(path, conv_hash)
+ # if path == '/DIF/Related-URL' then byebug end
+
if path.include?("[") && path.include?("]")
bool = true
elsif org_hash.is_a?(Hash) && conv_hash.is_a?(Hash)
@@ -101,6 +137,7 @@ def path_leads_to_list?(path, org_hash, conv_hash)
else
bool = false
end
+ # if bool == nil then bool = 'flag' end #THIS NEEDS TO BE EVALUATED
bool
end
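The ljust widths above are what line the text report up into columns: the counter is padded to four characters, the change-plus-element cell to sixty, and the path is appended last. A quick illustration with hypothetical values:

  counter, change, element, path = 3, '-', '<Role>TECHNICAL</Role>', '/Collection/Contacts'
  "#{counter}.".ljust(4) + "#{change}: #{element}".ljust(60) + path
  # => "3.  -: <Role>TECHNICAL</Role> ...padding... /Collection/Contacts"
  #    counter column (4 wide), change/element column (60 wide), then the path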
From d1854c8d4d6a798ff31652e7ddb45aafa312acc4 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 21 Jul 2020 15:27:44 -0400
Subject: [PATCH 28/49] MMT-2313 attempting to fix asterisk problem
---
app/helpers/loss_report_helper.rb | 1 +
1 file changed, 1 insertion(+)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 5d44853f0..d8c2e1fb5 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -11,6 +11,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
# orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
# conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
# end
+
# orig_xml = orig_xml.split('')[-1]
# conv_xml = conv_xml.split('')[-1]
From e381d86b51e85d32728a15872dfd1a49a480e3a4 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Wed, 22 Jul 2020 10:08:22 -0400
Subject: [PATCH 29/49] MMT-2306 Update FBM link for dMMT (#618)
* MMT-2306 updating footer
* MMT-2306 build without byebug
---
app/views/shared/_footer.html.erb | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/app/views/shared/_footer.html.erb b/app/views/shared/_footer.html.erb
index 921486ff7..fcfd0f227 100644
--- a/app/views/shared/_footer.html.erb
+++ b/app/views/shared/_footer.html.erb
@@ -10,18 +10,18 @@
<% unless Rails.env.test? %>
-
-
+
+
<% end %>
From 1bc3d15b26db413e64e79432fec9899c0ac17090 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 22 Jul 2020 13:29:55 -0400
Subject: [PATCH 30/49] MMT-2313: fix nokogiri problems
---
app/helpers/loss_report_helper.rb | 99 ++++++++++++++++---------------
1 file changed, 52 insertions(+), 47 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index d8c2e1fb5..58bdb7b73 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -4,70 +4,75 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
orig_xml,conv_xml,orig_h,conv_h,content_type = prepare_collections(concept_id, '1.15.3')
- # if content_type.include?('dif10')
- # orig = Nokogiri::XML(orig_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true})) { |config| config.strict.noblanks } .search('DIF').first.dup
- # conv = Nokogiri::XML(conv_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true})) { |config| config.strict.noblanks } .search('DIF').first.dup
- # else
- # orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
- # conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
- # end
-
- # orig_xml = orig_xml.split('')[-1]
- # conv_xml = conv_xml.split('')[-1]
-
- orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
- conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
-
-
+ if content_type.include?('dif10')
+ orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
+ conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
+ else
+ orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
+ conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
+ end
ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
comparison_string = String.new if disp == 'text'
comparison_hash = Hash.new if disp == 'json'
# comparison_hash['orig'] = hash_map(orig_h) if disp == 'json'
- # comparison_hash['orig'] = orig_h if disp == 'json'
+ comparison_hash['orig'] = orig_h if disp == 'json'
# comparison_hash['conv'] = conv_h if disp == 'json'
- comparison_string += orig_xml +"\n\n\n\n\n\n" + orig_h.to_xml({dasherize: false, skip_types: true, skip_instruct: true}) if disp == 'text'
+ comparison_string += orig_xml if disp == 'text'
+
+ # p = '/DIF'
+ # comparison_string += path_leads_to_list?(p, orig_h, conv_h).to_s + "\n\n" if disp == 'text'
comparison_hash['format'] = content_type if disp == 'json'
comparison_string += (content_type + "\n\n") if disp == 'text'
- # p = '/DIF/Related_RL/Related_UR'
- # comparison_string += (p+': path_leads_to_list? => ' + path_leads_to_list?(p, orig_h, conv_h).to_s + "\n")
-
counter = 1
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
element = node.to_xml
- path = node.parent.path
- split_path = path.split('[')[0]
-
+ path = node.parent.path.split('[')[0]
# comparison_string += (path + "\n") if disp == 'text'
- # if path_leads_to_list?(path, orig_h, conv_h) && !ignored_paths.include?(split_path)
- # ignored_paths << split_path
- # array_comparison(split_path, orig_h, conv_h).each do |item|
- # add_to_report(counter, 'c'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
- # counter += 1
- # end
- # # path += ' identified as a list'
- # elsif !ignored_paths.include?(split_path)
- # if is_xml?(node) #Possibly use the nokogiri #xml? method
- # # path += ' needs hash mapping'
- # element = Hash.from_xml(element)
- # hash_map(element).each do |item|
- # add_to_report(counter, 'ct'+change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
- # counter += 1
- # end
- # else
- # # path += ' pure nokogiri'
- # add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
- # counter += 1
- # end
- # end
-
- add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ # needs to check for lists that pass the first if condition but should be evaluated by the elsif (ie. Related_URL)
+ # figure out why some elements are not evaluating true at the first if (ie. Extended_Metadata)
+
+ if path_leads_to_list?(path, orig_h, conv_h) && !ignored_paths.include?(path) # all lists
+ ignored_paths << path
+ array_comparison(path, orig_h, conv_h).each do |item|
+ add_to_report(counter, 'c'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
+ end
+ elsif !ignored_paths.include?(path) # nokogiri
+ if is_xml?(node) #Possibly use the nokogiri #xml? method
+ element = Hash.from_xml(element)
+
+ hash_map(element).each do |item|
+ if path_leads_to_list?(path +'/'+ item['path'], orig_h, conv_h) && !ignored_paths.include?(path +'/'+ item['path']) # all lists
+ ignored_paths << path +'/'+ item['path']
+ array_comparison(path +'/'+ item['path'], orig_h, conv_h).each do |item|
+ add_to_report(counter, 'cc'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
+ end
+ else
+ add_to_report(counter, 'ct'+change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
+ end
+
+ end
+ else
+ add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
+ end
+ end
+ end
+
+ counter = 0
+ comparison_string += "\n\n\n\n" if disp == 'text'
+ orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+ add_to_report(counter, change, node.to_xml, node.parent.path, false, disp, comparison_hash, comparison_string)
counter += 1
end
+
if disp == 'text' then return comparison_string
elsif disp == 'json' then return comparison_hash end
end
@@ -123,7 +128,7 @@ def path_leads_to_list?(path, org_hash, conv_hash)
conv_hash = hash_navigation(path, conv_hash)
# if path == '/DIF/Related-URL' then byebug end
-
+ bool = false
if path.include?("[") && path.include?("]")
bool = true
elsif org_hash.is_a?(Hash) && conv_hash.is_a?(Hash)
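Stripping namespaces matters because libxml reports elements that live in an unprefixed (default) namespace as '*' in Node#path, which is the asterisk problem the earlier commits mention: paths like '/*/*' cannot be matched against the Hash.from_xml keys. A small Nokogiri illustration with a hypothetical DIF-style document (the namespace URI is made up):

  require 'nokogiri'

  xml = '<DIF xmlns="http://example.test/dif"><Entry_ID>X</Entry_ID></DIF>'
  doc = Nokogiri::XML(xml)
  doc.root.children.first.path # => "/*/*"  -- default-namespace nodes render as '*'

  doc.remove_namespaces!
  doc.root.children.first.path # => "/DIF/Entry_ID"  -- now matches the hash keys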
From be69b303276aa4885fc5a3de7ae5b545e74b2740 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 22 Jul 2020 16:38:37 -0400
Subject: [PATCH 31/49] MMT-2313: cleaned up inefficiencies, fixed bugs
---
app/helpers/loss_report_helper.rb | 86 +++++++++++++------------------
1 file changed, 37 insertions(+), 49 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 58bdb7b73..dfdd70cd9 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -17,12 +17,12 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
comparison_hash = Hash.new if disp == 'json'
# comparison_hash['orig'] = hash_map(orig_h) if disp == 'json'
- comparison_hash['orig'] = orig_h if disp == 'json'
+ # comparison_hash['orig'] = orig_h if disp == 'json'
# comparison_hash['conv'] = conv_h if disp == 'json'
- comparison_string += orig_xml if disp == 'text'
+ # comparison_string += orig_xml if disp == 'text'
# p = '/DIF'
- # comparison_string += path_leads_to_list?(p, orig_h, conv_h).to_s + "\n\n" if disp == 'text'
+ # comparison_string += path_leads_to_array?(p, orig_h, conv_h).to_s + "\n\n" if disp == 'text'
comparison_hash['format'] = content_type if disp == 'json'
comparison_string += (content_type + "\n\n") if disp == 'text'
@@ -33,10 +33,10 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
path = node.parent.path.split('[')[0]
# comparison_string += (path + "\n") if disp == 'text'
- # needs to check for lists that pass the first if condition but should be evaluated by the elsif (ie. Related_URL)
- # figure out why some elements are not evaluating true at the first if (ie. Extended_Metadata)
+ # need to check for lists in hash_map obj
+ # need to solve problem where noko cherry picks an item out of a list (giving no indication it is a list; ie. Extended_metadata, related_URL)
- if path_leads_to_list?(path, orig_h, conv_h) && !ignored_paths.include?(path) # all lists
+ if path_leads_to_array?(path, orig_h, conv_h) && !ignored_paths.include?(path) # all lists
ignored_paths << path
array_comparison(path, orig_h, conv_h).each do |item|
add_to_report(counter, 'c'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
@@ -47,12 +47,19 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
element = Hash.from_xml(element)
hash_map(element).each do |item|
- if path_leads_to_list?(path +'/'+ item['path'], orig_h, conv_h) && !ignored_paths.include?(path +'/'+ item['path']) # all lists
- ignored_paths << path +'/'+ item['path']
- array_comparison(path +'/'+ item['path'], orig_h, conv_h).each do |item|
- add_to_report(counter, 'cc'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
+ if path_leads_to_array?(path +'/'+ item['path'], orig_h, conv_h) && !ignored_paths.include?(path +'/'+ item['path']) # all lists
+ # hash_navigation(path+'/'+ item['path'], orig_h).is_a?(Array) ? arr_path = hash_navigation(path+'/'+ item['path'], orig_h, return_path=true) : arr_path = hash_navigation(path+'/'+ item['path'], conv_h, return_path=true)
+ arr_path = hash_navigation(path +'/'+ item['path'],orig_h,return_path=true) if hash_navigation(path +'/'+ item['path'], orig_h).is_a?(Array)
+ arr_path = hash_navigation(path +'/'+ item['path'],conv_h,return_path=true) if hash_navigation(path +'/'+ item['path'], conv_h).is_a?(Array)
+
+ if !ignored_paths.include?(arr_path)
+ ignored_paths << arr_path
+ array_comparison(arr_path, orig_h, conv_h).each do |item|
+ add_to_report(counter, 'cc'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
+ counter += 1
+ end
end
+
else
add_to_report(counter, 'ct'+change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
counter += 1
@@ -67,11 +74,11 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
end
counter = 0
- comparison_string += "\n\n\n\n" if disp == 'text'
- orig.diff(conv, {:added => true, :removed => true}) do |change,node|
- add_to_report(counter, change, node.to_xml, node.parent.path, false, disp, comparison_hash, comparison_string)
- counter += 1
- end
+ comparison_string += "\n\n\n\n#{JSON.pretty_generate(orig_h)}\n\n\n\n#{JSON.pretty_generate(conv_h)}" if disp == 'text'
+ # orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+ # add_to_report(counter, change, node.to_xml, node.parent.path, false, disp, comparison_hash, comparison_string)
+ # counter += 1
+ # end
if disp == 'text' then return comparison_string
elsif disp == 'json' then return comparison_hash end
@@ -85,12 +92,6 @@ def add_to_report(counter, change, element, path, hide_items, disp, comparison_h
return comparison_hash["#{counter}. #{change}: #{path}"] = element if disp == 'json'
end
- def change_path(path)
- arr = path.split('/*')
- arr[0] = '/DIF'
- arr.join
- end
-
def hash_map(hash)
buckets = Array.new
hash.each do |key,val|
@@ -122,7 +123,7 @@ def prepare_collections(concept_id, umm_c_version)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
- def path_leads_to_list?(path, org_hash, conv_hash)
+ def path_leads_to_array?(path, org_hash, conv_hash)
# this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
org_hash = hash_navigation(path, org_hash)
conv_hash = hash_navigation(path, conv_hash)
@@ -147,17 +148,23 @@ def path_leads_to_list?(path, org_hash, conv_hash)
bool
end
- def hash_navigation(path, hash)
+ def hash_navigation(path, hash, return_path=false)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the array/value at the end of the path
+ current_path = String.new
path.split('/').each do |key|
if hash.is_a?(Array)
- return hash
+ return hash if return_path == false
+ return current_path if return_path == true
elsif hash.key?(key) && hash.is_a?(Hash)
+ current_path += "/#{key}"
hash = hash[key]
+ elsif !hash.key?(key) && key != ''
+ return path_exists = false
end
end
- hash
+ return hash if return_path == false
+ return current_path if return_path == true
end
def array_comparison(path, original_hash, converted_hash)
@@ -165,35 +172,16 @@ def array_comparison(path, original_hash, converted_hash)
pre_translation_array = hash_navigation(path, original_hash)
post_translation_array = hash_navigation(path, converted_hash)
- # in the case that a one-item array is parsed as a regular key-value pair instead of an array, an Array wrapper is placed around key-val pair
- # so that the following for loops can be executed without error
- pre_translation_array.is_a?(Array) ? lost_items_arr = pre_translation_array.clone : lost_items_arr = Array.wrap(pre_translation_array)
- pre_translation_array = Array.wrap(pre_translation_array)
- post_translation_array.is_a?(Array) ? added_itmes_arr = post_translation_array.clone : added_itmes_arr = Array.wrap(post_translation_array)
- post_translation_array = Array.wrap(post_translation_array)
-
- # as defined above, the lost_items_arr and added_itmes_arr are copies of pre_translation_array and post_translation_array, respectively.
- # The *_arr values are edited during the comparison between the pre_translation_array and post_translation_array arrays
- # and so the *_array arrays are used to maintain a full version of each array for indexing the items in the following lines.
-
- for conv_item in post_translation_array
- for org_item in pre_translation_array
- if org_item == conv_item
- lost_items_arr.delete(org_item)
- added_itmes_arr.delete(conv_item)
- break
- end
- end
- end
+ pre_translation_array == false ? pre_translation_array = Array.new : pre_translation_array = Array.wrap(pre_translation_array)
+ post_translation_array == false ? post_translation_array = Array.new : post_translation_array = Array.wrap(post_translation_array)
output = Array.new
- lost_items_arr.each do |item|
+ (pre_translation_array - post_translation_array).each do |item|
path_with_index = path + "[#{pre_translation_array.index(item)}]"
output << ['-', item, path_with_index]
end
-
- added_itmes_arr.each do |item|
+ (post_translation_array - pre_translation_array).each do |item|
path_with_index = path + "[#{post_translation_array.index(item)}]"
output << ['+', item, path_with_index]
end
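Replacing the nested delete loops with Array differences shortens array_comparison considerably, with two caveats worth knowing: Array#- removes every occurrence of a duplicated item, and Array#index reports only the first position of a match. A sketch with hypothetical lists:

  pre  = [{ 'URL' => 'a' }, { 'URL' => 'b' }, { 'URL' => 'b' }]
  post = [{ 'URL' => 'b' }, { 'URL' => 'c' }]

  (pre - post).map { |item| ['-', item, "/Related_URL[#{pre.index(item)}]"] }
  # => [["-", {"URL"=>"a"}, "/Related_URL[0]"]]   -- both 'b' entries count as retained

  (post - pre).map { |item| ['+', item, "/Related_URL[#{post.index(item)}]"] }
  # => [["+", {"URL"=>"c"}, "/Related_URL[1]"]]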
From 34bb5fe53b658deeda10b17fc58302e0742407cf Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 24 Jul 2020 14:15:21 -0400
Subject: [PATCH 32/49] MMT-2313: fixed bugs, improved accuracy
---
app/helpers/loss_report_helper.rb | 214 +++++++++++++++----------
lib/tasks/compare_xml_collections.rake | 124 --------------
2 files changed, 125 insertions(+), 213 deletions(-)
delete mode 100644 lib/tasks/compare_xml_collections.rake
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index dfdd70cd9..d40f1aa55 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -4,7 +4,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
orig_xml,conv_xml,orig_h,conv_h,content_type = prepare_collections(concept_id, '1.15.3')
- if content_type.include?('dif10')
+ if content_type.include?('iso') || content_type.include?('dif')
orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
else
@@ -12,94 +12,156 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
end
- ignored_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
- comparison_string = String.new if disp == 'text'
- comparison_hash = Hash.new if disp == 'json'
+ #write files to test that all changes are being found with opendiff
+ dir = '/Users/ctrummer/Documents/devtesting'
+ o = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
+ c = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
+ File.write("#{dir}/o_#{concept_id}.xml", o.to_xml)
+ File.write("#{dir}/c_#{concept_id}.xml", c.to_xml)
- # comparison_hash['orig'] = hash_map(orig_h) if disp == 'json'
- # comparison_hash['orig'] = orig_h if disp == 'json'
- # comparison_hash['conv'] = conv_h if disp == 'json'
- # comparison_string += orig_xml if disp == 'text'
+ arr_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
+ text_output = String.new if disp == 'text'
+ json_output = Hash.new if disp == 'json'
- # p = '/DIF'
- # comparison_string += path_leads_to_array?(p, orig_h, conv_h).to_s + "\n\n" if disp == 'text'
+ # json_output['orig'] = hash_map(orig_h) if disp == 'json'
+ # json_output['orig'] = orig_h if disp == 'json'
+ # json_output['conv'] = conv_h if disp == 'json'
+ # text_output += orig_xml if disp == 'text'
- comparison_hash['format'] = content_type if disp == 'json'
- comparison_string += (content_type + "\n\n") if disp == 'text'
+ json_output['format'] = content_type if disp == 'json'
+ text_output += (content_type + "\n\n") if disp == 'text'
+
+ # text_output += top_level_arr_path('/Collection/OnlineResources/OnlineResource', orig_h, conv_h).to_s+"\n"
- counter = 1
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
element = node.to_xml
path = node.parent.path.split('[')[0]
- # comparison_string += (path + "\n") if disp == 'text'
+ arr_path = top_level_arr_path(path, orig_h, conv_h)
+
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "---------------------------------------------------------------------------------"
+ puts "arr_path: #{arr_path} ... node.parent.path: #{node.parent.path} ... path: #{path}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
+ if arr_path && path_not_checked?(arr_path, arr_paths)
- # need to check for lists in hash_map obj
- # need to solve problem where noko cherry picks an item out of a list (giving no indication it is a list; ie. Extended_metadata, related_URL)
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "** path 1"
+ puts "ar path_not_checked?(arr_path,arr_paths): #{path_not_checked?(arr_path,arr_paths).to_s}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- if path_leads_to_array?(path, orig_h, conv_h) && !ignored_paths.include?(path) # all lists
- ignored_paths << path
- array_comparison(path, orig_h, conv_h).each do |item|
- add_to_report(counter, 'c'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
+ arr_paths << arr_path
+ array_comparison(arr_path, orig_h, conv_h).each do |item| # all lists
+ add_to_report('ar'+item[0], item[1], item[2], hide_items, disp, json_output, text_output)
end
- elsif !ignored_paths.include?(path) # nokogiri
- if is_xml?(node) #Possibly use the nokogiri #xml? method
- element = Hash.from_xml(element)
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "arr_paths: #{arr_paths}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
+ elsif path_not_checked?(path, arr_paths) # nokogiri
+
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "** path 2"
+ puts "path_not_checked?(path,arr_paths): #{path_not_checked?(path,arr_paths).to_s}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
+ if is_xml?(node)
+ element = Hash.from_xml(element)
hash_map(element).each do |item|
- if path_leads_to_array?(path +'/'+ item['path'], orig_h, conv_h) && !ignored_paths.include?(path +'/'+ item['path']) # all lists
- # hash_navigation(path+'/'+ item['path'], orig_h).is_a?(Array) ? arr_path = hash_navigation(path+'/'+ item['path'], orig_h, return_path=true) : arr_path = hash_navigation(path+'/'+ item['path'], conv_h, return_path=true)
- arr_path = hash_navigation(path +'/'+ item['path'],orig_h,return_path=true) if hash_navigation(path +'/'+ item['path'], orig_h).is_a?(Array)
- arr_path = hash_navigation(path +'/'+ item['path'],conv_h,return_path=true) if hash_navigation(path +'/'+ item['path'], conv_h).is_a?(Array)
+ arr_path = top_level_arr_path("#{path}/#{item['path']}", orig_h, conv_h)
+
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "path_not_checked?('path/item['path']}, arr_paths): #{path_not_checked?("#{path}/#{item['path']}", arr_paths)}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- if !ignored_paths.include?(arr_path)
- ignored_paths << arr_path
+ if arr_path && path_not_checked?("#{path}/#{item['path']}", arr_paths) # all list
+ if path_not_checked?(arr_path, arr_paths)
+
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "na path_not_checked?(arr_path, arr_paths): #{path_not_checked?(arr_path, arr_paths)}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
+ arr_paths << arr_path
array_comparison(arr_path, orig_h, conv_h).each do |item|
- add_to_report(counter, 'cc'+item[0], item[1], item[2], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
+ add_to_report('na'+item[0], item[1], item[2], hide_items, disp, json_output, text_output)
end
- end
- else
- add_to_report(counter, 'ct'+change, item['value'], path +'/'+ item['path'], hide_items, disp, comparison_hash, comparison_string)
- counter += 1
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "arr_paths: #{arr_paths}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
+ end
+ elsif path_not_checked?("#{path}/#{item['path']}", arr_paths)
+ add_to_report('hn'+change, item['value'], "#{path}/#{item['path']}", hide_items, disp, json_output, text_output)
end
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "arr_paths: #{arr_paths}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
end
else
- add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
- counter += 1
+ add_to_report('ng'+change, element, path, hide_items, disp, json_output, text_output)
+
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ puts "arr_paths: #{arr_paths}"
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+
end
end
end
+ if disp == 'text' then return text_output
+ elsif disp == 'json' then return json_output end
+ end
- counter = 0
- comparison_string += "\n\n\n\n#{JSON.pretty_generate(orig_h)}\n\n\n\n#{JSON.pretty_generate(conv_h)}" if disp == 'text'
- # orig.diff(conv, {:added => true, :removed => true}) do |change,node|
- # add_to_report(counter, change, node.to_xml, node.parent.path, false, disp, comparison_hash, comparison_string)
- # counter += 1
- # end
+ def path_not_checked?(arr_path, arr_paths)
+ arr_paths.each do |path|
+ if arr_path.include?(path)
+ return false
+ end
+ end
+ true
+ end
+
+ def top_level_arr_path(path, orig_h, conv_h)
+ pre_translation_array, pre_translation_path = hash_navigation(path, orig_h)
+ post_translation_array, post_translation_path = hash_navigation(path, conv_h)
+
+ return false if pre_translation_array == false && post_translation_array == false
- if disp == 'text' then return comparison_string
- elsif disp == 'json' then return comparison_hash end
+ return pre_translation_path if pre_translation_array.is_a?(Array)
+ return post_translation_path if post_translation_array.is_a?(Array)
+
+ # the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
+ # <Contacts><Contact>contact</Contact></Contacts> and so all array-containing tags will be the plural
+ # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
+ # displayed by nokogiri
+ if pre_translation_array.is_a?(Hash) && pre_translation_array.keys.length == 1 && pre_translation_array[pre_translation_array.keys[0]].is_a?(Array)
+ return pre_translation_path + "/#{pre_translation_array.keys[0]}"
+ elsif post_translation_array.is_a?(Hash) && post_translation_array.keys.length == 1 && post_translation_array[post_translation_array.keys[0]].is_a?(Array)
+ return post_translation_path + "/#{post_translation_array.keys[0]}"
+ end
+
+ path_contains_array = false
end
- def add_to_report(counter, change, element, path, hide_items, disp, comparison_hash, comparison_string)
+ def add_to_report(change, element, path, hide_items, disp, json_output, text_output)
+ @counter ||= 0 and @counter += 1
+
# this function serves to preclude complex nests from forming in loss_report_output; the
# following 'if' structure is intended to increase readability by eliminating nests
- return comparison_string.concat("#{counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
- return comparison_string.concat("#{counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
- return comparison_hash["#{counter}. #{change}: #{path}"] = element if disp == 'json'
+ return text_output.concat("#{@counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
+ puts "#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path; return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
+ return json_output["#{@counter}. #{change}: #{path}"] = element if disp == 'json'
end
def hash_map(hash)
buckets = Array.new
hash.each do |key,val|
- if val.is_a? Hash
- hash_map(val).each do |item|
- item['path'] = key + '/' + item['path']
- buckets << item
- end
+ if val.is_a? Hash then hash_map(val).each do |item|
+ item['path'] = key + '/' + item['path']
+ buckets << item end
else
buckets << {'path'=> key, 'value'=> val}
end
@@ -123,59 +185,32 @@ def prepare_collections(concept_id, umm_c_version)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
- def path_leads_to_array?(path, org_hash, conv_hash)
- # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
- org_hash = hash_navigation(path, org_hash)
- conv_hash = hash_navigation(path, conv_hash)
-
- # if path == '/DIF/Related-URL' then byebug end
- bool = false
- if path.include?("[") && path.include?("]")
- bool = true
- elsif org_hash.is_a?(Hash) && conv_hash.is_a?(Hash)
- # the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
- # <Contacts><Contact>contact</Contact></Contacts> and so all array-containing tags will be the plural
- # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
- # displayed by nokogiri
- bool = true if org_hash.keys.length == 1 && org_hash[org_hash.keys[0]].is_a?(Array)
- bool = true if conv_hash.keys.length == 1 && conv_hash[conv_hash.keys[0]].is_a?(Array)
- elsif org_hash.is_a?(Array) || conv_hash.is_a?(Array)
- bool = true
- else
- bool = false
- end
- # if bool == nil then bool = 'flag' end #THIS NEEDS TO BE EVALUATED
- bool
- end
-
- def hash_navigation(path, hash, return_path=false)
+ def hash_navigation(path, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the array/value at the end of the path
current_path = String.new
path.split('/').each do |key|
if hash.is_a?(Array)
- return hash if return_path == false
- return current_path if return_path == true
+ return hash, current_path
elsif hash.key?(key) && hash.is_a?(Hash)
current_path += "/#{key}"
hash = hash[key]
elsif !hash.key?(key) && key != ''
- return path_exists = false
+ return path_exists = false, "#{current_path}/#{key}"
end
end
- return hash if return_path == false
- return current_path if return_path == true
+ return hash, current_path
end
def array_comparison(path, original_hash, converted_hash)
-
- pre_translation_array = hash_navigation(path, original_hash)
- post_translation_array = hash_navigation(path, converted_hash)
+ pre_translation_array = hash_navigation(path, original_hash)[0]
+ post_translation_array = hash_navigation(path, converted_hash)[0]
pre_translation_array == false ? pre_translation_array = Array.new : pre_translation_array = Array.wrap(pre_translation_array)
post_translation_array == false ? post_translation_array = Array.new : post_translation_array = Array.wrap(post_translation_array)
output = Array.new
+
(pre_translation_array - post_translation_array).each do |item|
path_with_index = path + "[#{pre_translation_array.index(item)}]"
output << ['-', item, path_with_index]
@@ -187,4 +222,5 @@ def array_comparison(path, original_hash, converted_hash)
end
output
end
+
end
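With hash_navigation now returning both the value reached and the path actually traversed, callers such as top_level_arr_path can recover the enclosing array's own path even when Nokogiri reports a deeper node. A sketch of consuming that pair, with a hypothetical collection hash:

  collection = { 'DIF' => { 'Related_URL' => [{ 'URL_Content_Type' => 'GET DATA' }] } }

  value, traversed = hash_navigation('/DIF/Related_URL/URL_Content_Type', collection)
  value     # => [{"URL_Content_Type"=>"GET DATA"}]  -- traversal stops at the array
  traversed # => "/DIF/Related_URL"                  -- the array's own path

  value, traversed = hash_navigation('/DIF/Missing_Key', collection)
  value     # => false                               -- the path does not exist
  traversed # => "/DIF/Missing_Key"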
diff --git a/lib/tasks/compare_xml_collections.rake b/lib/tasks/compare_xml_collections.rake
deleted file mode 100644
index 27ca858b3..000000000
--- a/lib/tasks/compare_xml_collections.rake
+++ /dev/null
@@ -1,124 +0,0 @@
-require 'libxml_to_hash'
-
-namespace :collection do
- desc 'Translate a collection from native format to UMM JSON and back to native format'
- task :loss, [:file, :format, :disp, :version] => :environment do |_task, args|
- args.with_defaults(:version => '1.15.3')
- args.with_defaults(:disp => 'show')
-
- abort 'FORMAT INVALID' unless args.format.eql? ('echo10' || 'dif10' || 'iso19115')
-
- filename = args.file.split('/')[-1]
- puts "\nTranslating #{filename} to UMM JSON..."
-
- native_original_xml = File.read(args.file)
- native_original_hash = Hash.from_xml(native_original_xml)
-
- #translate to UMM
- umm_response = cmr_client.translate_collection(native_original_xml, "application/#{args.format}+xml", "application/vnd.nasa.cmr.umm+json;version=#{args.version}", skip_validation=true )
- umm_json = umm_response.body.to_json
- umm_response.success? ? puts("\nsuccessful translation to UMM") : abort("\nUMM translation failure")
-
- # translate back to native
- back_to_native = cmr_client.translate_collection(umm_json, "application/vnd.nasa.cmr.umm+json;version=#{args.version}", "application/#{args.format}+xml", skip_validation=true )
- native_converted_hash = Hash.from_xml(back_to_native.body)
- native_converted_xml = back_to_native.body
- back_to_native.success? ? puts("successful translation to native format \n\n") : abort("Native format translation failure \n\n")
-
- # nokogiri output
- nokogiri_original = Nokogiri::XML(native_original_xml) { |config| config.strict.noblanks }
- nokogiri_converted = Nokogiri::XML(native_converted_xml) { |config| config.strict.noblanks }
-
- ignored_paths = Array.new
-
- nokogiri_original.diff(nokogiri_converted, {:added => true, :removed => true}) do |change,node|
- split_path = node.parent.path.split('[')
- if node.parent.path.include?('[') && !ignored_paths.include?(split_path[0])
- ignored_paths << split_path[0]
- array_comparison(split_path[0], native_original_hash, native_converted_hash).each do |item|
- puts("#{item[0]}: #{item[1]}".ljust(60) + item[2]) if args.disp.eql? 'show'
- puts("#{item[0]}: ". + item[2]) if args.disp.eql? 'hide'
- end
- elsif !ignored_paths.include?(split_path[0]) && !path_leads_to_list?(node.parent.path, native_original_hash, native_converted_hash)
- puts("#{change}: #{node.to_xml}".ljust(60) + node.parent.path) if args.disp.eql? 'show'
- puts("#{change}: ". + node.parent.path) if args.disp.eql? 'hide'
- end
- end
- end
-
- def path_leads_to_list?(path, org_hash, conv_hash)
- # this method takes a path string (and the full original and converted hashes) and outputs true if the path string contains a list; else false
- org_hash_path = hash_navigation(path, org_hash)
- conv_hash_path = hash_navigation(path, conv_hash)
-
- if path.include?("[") && path.include?("]")
- bool = true
- elsif org_hash_path.is_a?(Hash) && conv_hash_path.is_a?(Hash)
-      # the number of keys must be 1 because all arrays in echo10, dif10, and iso19115 are tagged similar to:
-      # <Contacts><Contact>...</Contact></Contacts>, and so all array-containing tags will be the plural
-      # of the array name. This clause serves to identify array-containing tags when their paths aren't properly
-      # displayed by nokogiri
- bool = true if org_hash_path.keys.length == 1 && org_hash_path[org_hash_path.keys[0]].is_a?(Array)
- bool = true if conv_hash_path.keys.length == 1 && conv_hash_path[conv_hash_path.keys[0]].is_a?(Array)
- elsif org_hash_path.is_a?(Array) || conv_hash_path.is_a?(Array)
- bool = true
- else
- bool = false
- end
- bool
- end
-
- def hash_navigation(dir, hash)
- # Passed a path string and the hash being navigated. This method parses the path string and
- # returns the hash at the end of the path
- dir = dir.split '/'
- dir.each do |key|
- if !key.empty? && hash.is_a?(Hash)
- hash = hash[key]
- elsif hash.is_a? Array
- return hash
- end
- end
- hash
- end
-
- def array_comparison(path, original_hash, converted_hash)
-
- org_array = hash_navigation(path, original_hash)
- conv_array = hash_navigation(path, converted_hash)
-
- org_array.is_a?(Array) ? org_arr = org_array.clone : org_arr = Array.wrap(org_array)
- org_array = Array.wrap(org_array) unless org_array.is_a?(Array)
- conv_array.is_a?(Array) ? conv_arr = conv_array.clone : conv_arr = Array.wrap(conv_array)
- conv_array = Array.wrap(conv_array) unless conv_array.is_a?(Array)
-
- # org_arr and conv_arr are copies of org_array and conv_array, respectively.
-    # The *_arr values are edited during the comparison between the org_array and conv_array arrays
-    # and so the *_array arrays are used to maintain a full version of each array for indexing the items in the following lines.
-
- for conv_item in conv_array
- for org_item in org_array
- if org_item.eql? conv_item
- org_arr.delete(org_item)
- conv_arr.delete(conv_item)
- break
- end
- end
- end
-
- output = Array.new
- org_arr.each do |item|
- path_with_index = path + "[#{org_array.index(item)}]"
- loss_item = ['-', item, path_with_index]
- output << loss_item
- end
-
-
- conv_arr.each do |item|
- path_with_index = path + "[#{conv_array.index(item)}]"
- loss_item = ['+', item, path_with_index]
- output << loss_item
- end
- output
- end
-end
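
For context on the deletion: the rake task's array_comparison cloned both arrays and deleted every matched pair, so that only the lossy (unmatched) items were left to report, while the untouched originals kept their indices. A condensed, runnable sketch of that clone-and-delete technique (the method name is illustrative; it also shares the original's caveat that Array#index reports the first occurrence of a duplicate item):

def array_diff(path, original, converted)
  org_left  = original.dup
  conv_left = converted.dup
  converted.each do |item|
    i = org_left.index(item)
    next unless i
    org_left.delete_at(i)                      # drop the matched pair from both
    conv_left.delete_at(conv_left.index(item)) # working copies
  end
  org_left.map  { |item| ['-', item, "#{path}[#{original.index(item)}]"] } +
    conv_left.map { |item| ['+', item, "#{path}[#{converted.index(item)}]"] }
end

# array_diff('/Collection/SpatialKeywords/Keyword', %w[Africa Asia], %w[AFRICA Asia])
# => [["-", "Africa", "/Collection/SpatialKeywords/Keyword[0]"],
#     ["+", "AFRICA", "/Collection/SpatialKeywords/Keyword[0]"]]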
From a267e4cc6aefd79324aaa06768a04d22a6f0b93d Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 27 Jul 2020 09:14:56 -0400
Subject: [PATCH 33/49] MMT-2313: added some test code to be removed later
---
app/helpers/loss_report_helper.rb | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index d40f1aa55..bea6def45 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -14,10 +14,8 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
#write files to test that all changes are being found with opendiff
dir = '/Users/ctrummer/Documents/devtesting'
- o = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
- c = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
- File.write("#{dir}/o_#{concept_id}.xml", o.to_xml)
- File.write("#{dir}/c_#{concept_id}.xml", c.to_xml)
+ File.write("#{dir}/o_#{concept_id}.xml", orig.to_xml)
+ File.write("#{dir}/c_#{concept_id}.xml", conv.to_xml)
arr_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
text_output = String.new if disp == 'text'
@@ -185,6 +183,7 @@ def prepare_collections(concept_id, umm_c_version)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
+
def hash_navigation(path, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the array/value at the end of the path
@@ -210,14 +209,17 @@ def array_comparison(path, original_hash, converted_hash)
post_translation_array == false ? post_translation_array = Array.new : post_translation_array = Array.wrap(post_translation_array)
output = Array.new
-
(pre_translation_array - post_translation_array).each do |item|
path_with_index = path + "[#{pre_translation_array.index(item)}]"
+ # the following line is used to eliminate indexing confusion when there is more than one occurrence of a particular item in an array
+ pre_translation_array[pre_translation_array.index(item)] = item.to_s + 'item indexed'
output << ['-', item, path_with_index]
end
(post_translation_array - pre_translation_array).each do |item|
path_with_index = path + "[#{post_translation_array.index(item)}]"
+ # the following line is used to eliminate indexing confusion when there is more than one occurrence of a particular item in an array
+ post_translation_array[post_translation_array.index(item)] = item.to_s + 'item indexed'
output << ['+', item, path_with_index]
end
output
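
The 'item indexed' sentinel added in this patch works around Array#index always returning the first match, which would give duplicate array items identical indices in the report. The trick stripped to its essentials, with hypothetical values:

items = ['a', 'a', 'b']
work  = items.dup
indexed = items.map do |item|
  i = work.index(item)                  # first occurrence not yet marked
  work[i] = item.to_s + 'item indexed'  # mark it so the next lookup advances
  "#{item}[#{i}]"
end
# indexed => ["a[0]", "a[1]", "b[2]"]   (without marking: ["a[0]", "a[0]", "b[2]"])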
From 937342dea65a6b22f315052cc0e613ae2d3fa513 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 28 Jul 2020 09:14:12 -0400
Subject: [PATCH 34/49] MMT-2313: Added some exception handling
---
app/helpers/loss_report_helper.rb | 43 +++++++++++++++++++++++--------
1 file changed, 32 insertions(+), 11 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index bea6def45..e28429c75 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -2,7 +2,14 @@ module LossReportHelper
def loss_report_output(concept_id, hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
- orig_xml,conv_xml,orig_h,conv_h,content_type = prepare_collections(concept_id, '1.15.3')
+
+  # prepare_collections returns false when any of the cmr_client requests fails
+ if (collections = prepare_collections(concept_id, '1.15.3'))
+ orig_xml,conv_xml,orig_h,conv_h,content_type = collections
+ else
+ return "Failure to get_concept or translate_collection" if disp == 'text'
+ return {"error"=>"Failure to get_concept or translate_collection"} if disp == 'json'
+ end
if content_type.include?('iso') || content_type.include?('dif')
orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
@@ -29,8 +36,6 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
json_output['format'] = content_type if disp == 'json'
text_output += (content_type + "\n\n") if disp == 'text'
- # text_output += top_level_arr_path('/Collection/OnlineResources/OnlineResource', orig_h, conv_h).to_s+"\n"
-
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
element = node.to_xml
path = node.parent.path.split('[')[0]
@@ -50,7 +55,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
arr_paths << arr_path
array_comparison(arr_path, orig_h, conv_h).each do |item| # all lists
- add_to_report('ar'+item[0], item[1], item[2], hide_items, disp, json_output, text_output)
+ add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output)
end
# FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
@@ -82,7 +87,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
arr_paths << arr_path
array_comparison(arr_path, orig_h, conv_h).each do |item|
- add_to_report('na'+item[0], item[1], item[2], hide_items, disp, json_output, text_output)
+ add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output)
end
# FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
@@ -91,7 +96,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
end
elsif path_not_checked?("#{path}/#{item['path']}", arr_paths)
- add_to_report('hn'+change, item['value'], "#{path}/#{item['path']}", hide_items, disp, json_output, text_output)
+ add_to_report(change, item['value'], "#{path}/#{item['path']}", hide_items, disp, json_output, text_output)
end
# FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
@@ -99,8 +104,10 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
# FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
end
+ elsif (attr,val = is_attribute?(node))
+ add_to_report(change, val, "#{path}/#{attr}" , hide_items, disp, json_output, text_output)
else
- add_to_report('ng'+change, element, path, hide_items, disp, json_output, text_output)
+ add_to_report(change, element, path, hide_items, disp, json_output, text_output)
# FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
puts "arr_paths: #{arr_paths}"
@@ -109,8 +116,18 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
end
end
end
- if disp == 'text' then return text_output
- elsif disp == 'json' then return json_output end
+ return text_output if disp == 'text'
+ return json_output if disp == 'json'
+ end
+
+ def is_attribute?(node)
+ if node.to_xml.include?('=') && !node.to_xml.include?(' = ')
+ attr_val = Array.new
+ node.to_xml.split('=').each {|item| attr_val << item.strip.delete('\\"')}
+ attr_val
+ else
+ false
+ end
end
def path_not_checked?(arr_path, arr_paths)
@@ -146,11 +163,12 @@ def top_level_arr_path(path, orig_h, conv_h)
def add_to_report(change, element, path, hide_items, disp, json_output, text_output)
@counter ||= 0 and @counter += 1
-
# this function serves to preclude complex nests from forming in loss_report_output. The
# following 'if' structure is intended to increase readability by eliminating nests
return text_output.concat("#{@counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
puts "#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path; return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
+ # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
return json_output["#{@counter}. #{change}: #{path}"] = element if disp == 'json'
end
@@ -175,10 +193,13 @@ def is_xml?(node)
def prepare_collections(concept_id, umm_c_version)
# TODO: need to add exception handling for get_concept, translate_collection
original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
+ return false if !original_collection_native_xml.success?
content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, content_type, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
- translated_collection_native_xml = cmr_client.translate_collection(translated_collection_umm_json.body.to_json, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, skip_validation=true)
+ return false if !translated_collection_umm_json.success?
+ translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(translated_collection_umm_json.body), "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, skip_validation=true)
+ return false if !translated_collection_native_xml.success?
translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
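
Taken together, the new guards reduce prepare_collections and its caller to the control flow below (a condensed restatement of the hunks in this patch; the argument lists mirror the calls shown there):

def prepare_collections(concept_id, umm_c_version)
  original = cmr_client.get_concept(concept_id, token, {})
  return false unless original.success?

  content_type = original.headers.fetch('content-type').split(';')[0]
  umm = cmr_client.translate_collection(original.body, content_type,
    "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", true)
  return false unless umm.success?

  native = cmr_client.translate_collection(JSON.pretty_generate(umm.body),
    "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, true)
  return false unless native.success?

  [original.body, native.body, Hash.from_xml(original.body), Hash.from_xml(native.body), content_type]
end

# caller, as in loss_report_output:
#   if (collections = prepare_collections(concept_id, '1.15.3'))
#     orig_xml, conv_xml, orig_h, conv_h, content_type = collections
#   else
#     # 'Failure to get_concept or translate_collection'
#   end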
From fa56d22dfed8cffad2b282f2f3447dbf623d9f6d Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 29 Jul 2020 14:21:29 -0400
Subject: [PATCH 35/49] MMT-2313: created specs and stored some sample reports
---
app/controllers/collections_controller.rb | 2 +-
app/helpers/loss_report_helper.rb | 106 +--
config/routes.rb | 2 +-
spec/factories/loss_report_factory_data.rb | 831 ++++++++++++++++++
spec/features/collections/loss_report_spec.rb | 41 +
spec/helpers/loss_report_helper_spec.rb | 49 ++
6 files changed, 957 insertions(+), 74 deletions(-)
create mode 100644 spec/factories/loss_report_factory_data.rb
create mode 100644 spec/features/collections/loss_report_spec.rb
create mode 100644 spec/helpers/loss_report_helper_spec.rb
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index e148bed83..78e9cfaf5 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -122,7 +122,7 @@ def loss_report
# This method is needed to reference the appropriate helper and view for the lossiness report
concept_id = params[:id]
respond_to do |format|
- format.text {render plain: loss_report_output(concept_id, hide_items=true, disp='text') }
+ format.text { render plain: loss_report_output(concept_id, hide_items=true, disp='text') }
format.json { render json: JSON.pretty_generate(loss_report_output(concept_id, hide_items=false, disp='json')) }
end
end
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index e28429c75..e17a00e80 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,5 +1,17 @@
module LossReportHelper
+ def cmr_client
+ Cmr::Client.client_for_environment(Rails.configuration.cmr_env, Rails.configuration.services)
+ end
+
+ def token
+ if session[:login_method] == 'launchpad'
+ session[:launchpad_cookie]
+ elsif session[:login_method] == 'urs'
+ session[:access_token]
+ end
+ end
+
def loss_report_output(concept_id, hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
@@ -7,7 +19,7 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
if (collections = prepare_collections(concept_id, '1.15.3'))
orig_xml,conv_xml,orig_h,conv_h,content_type = collections
else
- return "Failure to get_concept or translate_collection" if disp == 'text'
+ return 'Failure to get_concept or translate_collection' if disp == 'text'
return {"error"=>"Failure to get_concept or translate_collection"} if disp == 'json'
end
@@ -41,78 +53,27 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
path = node.parent.path.split('[')[0]
arr_path = top_level_arr_path(path, orig_h, conv_h)
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "---------------------------------------------------------------------------------"
- puts "arr_path: #{arr_path} ... node.parent.path: #{node.parent.path} ... path: #{path}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
if arr_path && path_not_checked?(arr_path, arr_paths)
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "** path 1"
- puts "ar path_not_checked?(arr_path,arr_paths): #{path_not_checked?(arr_path,arr_paths).to_s}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
arr_paths << arr_path
- array_comparison(arr_path, orig_h, conv_h).each do |item| # all lists
- add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output)
- end
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "arr_paths: #{arr_paths}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
+ array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
elsif path_not_checked?(path, arr_paths) # nokogiri
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "** path 2"
- puts "path_not_checked?(path,arr_paths): #{path_not_checked?(path,arr_paths).to_s}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
if is_xml?(node)
element = Hash.from_xml(element)
hash_map(element).each do |item|
arr_path = top_level_arr_path("#{path}/#{item['path']}", orig_h, conv_h)
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "path_not_checked?('path/item['path']}, arr_paths): #{path_not_checked?("#{path}/#{item['path']}", arr_paths)}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
if arr_path && path_not_checked?("#{path}/#{item['path']}", arr_paths) # all list
if path_not_checked?(arr_path, arr_paths)
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "na path_not_checked?(arr_path, arr_paths): #{path_not_checked?(arr_path, arr_paths)}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
arr_paths << arr_path
- array_comparison(arr_path, orig_h, conv_h).each do |item|
- add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output)
- end
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "arr_paths: #{arr_paths}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
+ array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
end
elsif path_not_checked?("#{path}/#{item['path']}", arr_paths)
add_to_report(change, item['value'], "#{path}/#{item['path']}", hide_items, disp, json_output, text_output)
end
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "arr_paths: #{arr_paths}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
end
elsif (attr,val = is_attribute?(node))
add_to_report(change, val, "#{path}/#{attr}" , hide_items, disp, json_output, text_output)
else
add_to_report(change, element, path, hide_items, disp, json_output, text_output)
-
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "arr_paths: #{arr_paths}"
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
-
end
end
end
@@ -120,7 +81,15 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
return json_output if disp == 'json'
end
+ def is_xml?(node)
+ # checks if the node being passed is xml
+ # may be beneficial to add more checks
+    node.to_xml.include?('<') && node.to_xml.include?('>')
+ end
+
def is_attribute?(node)
+ # this method checks if the node being passed is an attribute change;
+ # TODO: it may be beneficial to add more conditions to improve accuracy
if node.to_xml.include?('=') && !node.to_xml.include?(' = ')
attr_val = Array.new
node.to_xml.split('=').each {|item| attr_val << item.strip.delete('\\"')}
@@ -131,20 +100,19 @@ def is_attribute?(node)
end
def path_not_checked?(arr_path, arr_paths)
- arr_paths.each do |path|
- if arr_path.include?(path)
- return false
- end
- end
+    # this method checks the arr_paths array to see if the path being added to
+    # the report has already been evaluated and added
+ arr_paths.each { |path| return false if arr_path.include?(path) }
true
end
def top_level_arr_path(path, orig_h, conv_h)
+    # if a path that passes through an array is passed, e.g. /Contacts/Contact[0]/Role/Name,
+    # this method returns /Contacts/Contact because Contact is the outermost array (or false if the path doesn't contain an array)
pre_translation_array, pre_translation_path = hash_navigation(path, orig_h)
post_translation_array, post_translation_path = hash_navigation(path, conv_h)
return false if pre_translation_array == false && post_translation_array == false
-
return pre_translation_path if pre_translation_array.is_a?(Array)
return post_translation_path if post_translation_array.is_a?(Array)
@@ -153,11 +121,10 @@ def top_level_arr_path(path, orig_h, conv_h)
# of the array name. This clause serves to identify array-containing tags when their paths aren't properly
# displayed by nokogiri
if pre_translation_array.is_a?(Hash) && pre_translation_array.keys.length == 1 && pre_translation_array[pre_translation_array.keys[0]].is_a?(Array)
- return pre_translation_path + "/#{pre_translation_array.keys[0]}"
+ return "#{pre_translation_path}/#{pre_translation_array.keys[0]}"
elsif post_translation_array.is_a?(Hash) && post_translation_array.keys.length == 1 && post_translation_array[post_translation_array.keys[0]].is_a?(Array)
- return post_translation_path + "/#{post_translation_array.keys[0]}"
+ return "#{post_translation_path}/#{post_translation_array.keys[0]}"
end
-
path_contains_array = false
end
@@ -166,9 +133,7 @@ def add_to_report(change, element, path, hide_items, disp, json_output, text_out
# this function serves to preclude complex nests from forming in loss_report_output. The
# following 'if' structure is intended to increase readability by eliminating nests
return text_output.concat("#{@counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
- puts "#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path; return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
- # FOR TROUBLESHOOTING -------------------------------------------------------------------------------------
+ return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
return json_output["#{@counter}. #{change}: #{path}"] = element if disp == 'json'
end
@@ -185,21 +150,18 @@ def hash_map(hash)
buckets
end
- def is_xml?(node)
- if node.to_xml.include?('<' && '' && '>') then return true
- else return false end
- end
-
def prepare_collections(concept_id, umm_c_version)
- # TODO: need to add exception handling for get_concept, translate_collection
original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
return false if !original_collection_native_xml.success?
+
content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, content_type, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
return false if !translated_collection_umm_json.success?
+
translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(translated_collection_umm_json.body), "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, skip_validation=true)
return false if !translated_collection_native_xml.success?
+
translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
end
diff --git a/config/routes.rb b/config/routes.rb
index 74e5b20ac..b5e17c668 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -69,7 +69,7 @@
get '/collections/:id/download_xml/:format(/:revision_id)' => 'collections#download_xml', as: 'download_collection_xml'
get '/collections/:id/create_delete_proposal' => 'collections#create_delete_proposal', as: 'create_delete_proposal_collection'
get '/collections/:id/create_update_proposal' => 'collections#create_update_proposal', as: 'create_update_proposal_collection'
- get '/collections/:id/loss' => 'collections#loss_report'
+ get '/collections/:id/loss' => 'collections#loss_report', as: 'loss_report_collections'
resource :variable_generation_processes_search, only: [:new]
diff --git a/spec/factories/loss_report_factory_data.rb b/spec/factories/loss_report_factory_data.rb
new file mode 100644
index 000000000..517da5094
--- /dev/null
+++ b/spec/factories/loss_report_factory_data.rb
@@ -0,0 +1,831 @@
+
+def dif_id
+ 'C1200000031-SEDAC'
+end
+
+def iso_id
+ 'C1200000089-LARC'
+end
+
+def echo_id
+ 'C1200000040-SEDAC'
+end
+
+def iso_json_report
+ {
+ "format" => "application/iso:smap+xml",
+ "1. -: /DS_Series/schemaLocation" => "http://www.isotc211.org/2005/gmi http://cdn.earthdata.nasa.gov/iso/schema/1.0/ISO19115-2_EOS.xsd",
+ "2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName" => "L4_SM_aup",
+ "3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode" => "utf8",
+ "4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode" => "series",
+ "5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription" => nil,
+ "6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType" => nil,
+ "7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString" => "Not provided",
+ "8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.processinglevelid",
+ "9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString" => "ISO 19115-2 Geographic information - Metadata - Part 2: Extensions for imagery and gridded data",
+ "10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode" => "series",
+ "11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id" => "dba588298-ef6b-4e0f-9092-d1bfe87001ea",
+ "12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString" => "Not provided",
+ "13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.platformshortname",
+ "14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString" => "Not provided",
+ "15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason" => "inapplicable",
+ "16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString" => "ISO 19115-2:2009-02-15",
+ "17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "Date" => "2016-04-29"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, "edition" => {
+ "CharacterString" => "Vv2010"
+ }, "identifier" => [{
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }, "codeSpace" => {
+ "CharacterString" => "http://gmao.gsfc.nasa.gov"
+ }, "description" => {
+ "CharacterString" => "The ECS Short Name"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "002"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis"
+ }, "description" => {
+ "CharacterString" => "The ECS Version ID"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "Anchor" => "doi:10.5067/JJY2V0GJNFRZ"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis"
+ }, "description" => {
+ "CharacterString" => "A Digital Object Identifier (DOI) that provides a persistent interoperable means to locate the SMAP Level 4 Radar data product."
+ }
+ }
+ }], "citedResponsibleParty" => [{
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "National Aeronautics and Space Administration"
+ }, "role" => {
+ "CI_RoleCode" => "resourceProvider"
+ }
+ }
+ }, {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "Global Modeling and Assimilation Office"
+ }, "role" => {
+ "CI_RoleCode" => "originator"
+ }
+ }
+ }], "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }, "otherCitationDetails" => {
+ "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "purpose" => {
+ "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ }, "credit" => {
+ "CharacterString" => "The software that generates the L4_SM data product and the data system that automates its production were designed and implemented at the NASA Global Modeling and Assimilation Office, Goddard Space Flight Center, Greenbelt, Maryland, USA."
+ }, "status" => {
+ "MD_ProgressCode" => "onGoing"
+ }, "pointOfContact" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "PVC"
+ }, "role" => {
+ "CI_RoleCode" => "distributor"
+ }
+ }
+ }, "resourceMaintenance" => {
+ "MD_MaintenanceInformation" => {
+ "maintenanceAndUpdateFrequency" => {
+ "MD_MaintenanceFrequencyCode" => "As Needed"
+ }, "dateOfNextUpdate" => {
+ "Date" => "2016-11-01"
+ }, "updateScope" => {
+ "MD_ScopeCode" => "series"
+ }
+ }
+ }, "resourceFormat" => {
+ "MD_Format" => {
+ "name" => {
+ "CharacterString" => "HDF5"
+ }, "version" => {
+ "CharacterString" => "Version 1.8.9"
+ }
+ }
+ }, "descriptiveKeywords" => [{
+ "MD_Keywords" => {
+ "keyword" => [{
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }], "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Earth Remote Sensing Instruments > Active Remote Sensing > NONE > SMAP L-BAND RADAR > SMAP L-Band Radar"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Earth Observation Satellites > NASA Decadal Survey > SMAP > Soil Moisture Active and Passive Observatory"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "GEOGRAPHIC REGION > GLOBAL"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }], "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP"
+ }
+ }
+ }, "associationType" => {
+ "DS_AssociationTypeCode" => "largerWorkCitation"
+ }, "initiativeType" => {
+ "DS_InitiativeTypeCode" => "mission"
+ }
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }, "characterSet" => {
+ "MD_CharacterSetCode" => "utf8"
+ }, "topicCategory" => {
+ "MD_TopicCategoryCode" => "geoscientificInformation"
+ }, "environmentDescription" => {
+ "CharacterString" => "Data product generated by the SMAP mission in HDF5 format with metadata that conforms to the ISO 19115 model."
+ }, "extent" => {
+ "EX_Extent" => {
+ "description" => {
+ "CharacterString" => "Global land excluding inland water and permanent ice."
+ }, "geographicElement" => {
+ "EX_GeographicBoundingBox" => {
+ "extentTypeCode" => {
+ "Boolean" => "1"
+ }, "westBoundLongitude" => {
+ "Decimal" => "-180"
+ }, "eastBoundLongitude" => {
+ "Decimal" => "180"
+ }, "southBoundLatitude" => {
+ "Decimal" => "-85.04456"
+ }, "northBoundLatitude" => {
+ "Decimal" => "85.04456"
+ }
+ }
+ }, "temporalElement" => {
+ "EX_TemporalExtent" => {
+ "extent" => {
+ "TimePeriod" => {
+ "gml:id" => "swathTemporalExtent", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "Soil Moisture Active Passive Mission Level 4 Surface and Root Zone Soil Moisture (L4_SM) Product Specification Document"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "Date" => "2015-10-31"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "publication"
+ }
+ }
+ }, "edition" => {
+ "CharacterString" => "1.4"
+ }, "identifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "L4_SM"
+ }, "codeSpace" => {
+ "CharacterString" => "http://gmao.gsfc.nasa.gov"
+ }, "description" => {
+ "CharacterString" => "A short name used by the Soil Moisture Active Passive (SMAP) mission to identify the Level 4 Radar product."
+ }
+ }
+ }, "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DataSetId"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DataSetId"
+ }, "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ }, "associationType" => nil
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "InsertTime"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "InsertTime"
+ }, "purpose" => {
+ "CharacterString" => "InsertTime"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "UpdateTime"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "UpdateTime"
+ }, "purpose" => {
+ "CharacterString" => "UpdateTime"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DIFID"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, "identifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DIFID"
+ }, "purpose" => {
+ "CharacterString" => "DIFID"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ }, "date" => [{
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-04-29T00:00:00.000Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }], "edition" => {
+ "CharacterString" => "Vv2010"
+ }, "identifier" => [{
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }, "description" => {
+ "CharacterString" => "The ECS Short Name"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "002"
+ }, "description" => {
+ "CharacterString" => "The ECS Version ID"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "doi:10.5067/JJY2V0GJNFRZ"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis.umm.doi"
+ }, "description" => {
+ "CharacterString" => "DOI"
+ }
+ }
+ }], "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }, "otherCitationDetails" => {
+ "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "purpose" => {
+ "gco:nilReason" => "missing", "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ }, "status" => {
+ "MD_ProgressCode" => "onGoing"
+ }, "pointOfContact" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "PVC"
+ }, "role" => {
+ "CI_RoleCode" => "distributor"
+ }
+ }
+ }, "descriptiveKeywords" => [{
+ "MD_Keywords" => {
+ "keyword" => [{
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > NONE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }], "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "gco:nilReason" => "unknown"
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Aircraft > Not provided > Not provided > "
+ }
+ }
+ }], "language" => {
+ "CharacterString" => "eng"
+ }, "topicCategory" => {
+ "MD_TopicCategoryCode" => "geoscientificInformation"
+ }, "extent" => {
+ "EX_Extent" => {
+ "geographicElement" => {
+ "EX_GeographicBoundingBox" => {
+ "extentTypeCode" => {
+ "Boolean" => "1"
+ }, "westBoundLongitude" => {
+ "Decimal" => "-180.0"
+ }, "eastBoundLongitude" => {
+ "Decimal" => "180.0"
+ }, "southBoundLatitude" => {
+ "Decimal" => "-85.04456"
+ }, "northBoundLatitude" => {
+ "Decimal" => "85.04456"
+ }
+ }
+ }, "temporalElement" => {
+ "EX_TemporalExtent" => {
+ "extent" => {
+ "TimePeriod" => {
+ "gml:id" => "dc46625fa-ae1e-4c95-a6ae-b15dd90fe8d3", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ }, "processingLevel" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "Not provided"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis.umm.processinglevelid"
+ }
+ }
+ }
+ }
+ },
+ "24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DataSetId"
+ }, "date" => [{
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-04-29T00:00:00.000Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }], "citedResponsibleParty" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "Global Modeling and Assimilation Office"
+ }, "role" => {
+ "CI_RoleCode" => "originator"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DataSetId"
+ }, "resourceFormat" => {
+ "MD_Format" => {
+ "name" => {
+ "CharacterString" => "HDF5"
+ }, "version" => {
+ "gco:nilReason" => "unknown"
+ }
+ }
+ }, "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ }, "associationType" => {
+ "DS_AssociationTypeCode" => "largerWorkCitation"
+ }
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC", "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org", "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/", "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact", "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact", "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29", "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
+ }
+end
+
+def dif_json_report
+ {
+ "format" => "application/dif10+xml",
+ "1. -: /DIF/Temporal_Coverage/Temporal_Range_Type" => "Long Range",
+ "2. -: /DIF/Related_URL[1]" => {
+ "URL_Content_Type" => {
+ "Type" => "VIEW DATA SET LANDING PAGE"
+ }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
+ },
+ "3. +: /DIF/Related_URL[1]" => {
+ "URL_Content_Type" => {
+ "Type" => "DATA SET LANDING PAGE"
+ }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
+ }
+}
+end
+
+def echo_json_report
+ {
+ "format" => "application/echo10+xml",
+ "1. -: /Collection/Orderable" => "true",
+ "2. -: /Collection/Visible" => "true",
+ "3. -: /Collection/MaintenanceAndUpdateFrequency" => "As needed",
+ "4. +: /Collection/Temporal/EndsAtPresentFlag" => "false",
+ "5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime" => "1970-01-01T00:00:00.000Z",
+ "6. +: /Collection/Platforms/Platform/ShortName" => "Not provided",
+ "7. +: /Collection/Platforms/Platform/LongName" => "Not provided",
+ "8. +: /Collection/Platforms/Platform/Type" => "Not provided",
+ "9. -: /Collection/AssociatedDIFs/DIF/EntryId" => "CIESIN_SEDAC_ANTHROMES_v2_1700",
+ "10. -: /Collection/InsertTime" => "2014-05-13T00:00:00Z",
+ "11. +: /Collection/InsertTime" => "2014-05-13T00:00:00.000Z",
+ "12. -: /Collection/LastUpdate" => "2015-08-04T00:00:00Z",
+ "13. +: /Collection/LastUpdate" => "2015-08-04T00:00:00.000Z",
+ "14. -: /Collection/LongName" => "Anthropogenic Biomes of the World, Version 2: 1700",
+ "15. +: /Collection/LongName" => "Not provided",
+ "16. -: /Collection/CollectionState" => "Final",
+ "17. +: /Collection/CollectionState" => "NOT PROVIDED",
+ "18. -: /Collection/Price" => "0",
+ "19. +: /Collection/Price" => " 0.00",
+ "20. -: /Collection/SpatialKeywords/Keyword[0]" => "Africa",
+ "21. -: /Collection/SpatialKeywords/Keyword[1]" => "Asia",
+ "22. +: /Collection/SpatialKeywords/Keyword[0]" => "AFRICA",
+ "23. +: /Collection/SpatialKeywords/Keyword[1]" => "GAZA STRIP",
+ "24. -: /Collection/Contacts/Contact[0]" => {
+ "Role" => "Archive", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
+ "Address" => {
+ "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
+ }
+ }, "OrganizationPhones" => {
+ "Phone" => [{
+ "Number" => "+1 845-365-8920",
+ "Type" => "Telephone"
+ }, {
+ "Number" => "+1 845-365-8922",
+ "Type" => "Fax"
+ }]
+ }, "OrganizationEmails" => {
+ "Email" => "ciesin.info@ciesin.columbia.edu"
+ }, "ContactPersons" => {
+ "ContactPerson" => {
+ "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services"
+ }
+ }
+ },
+ "25. +: /Collection/Contacts/Contact[0]" => {
+ "Role" => "PROCESSOR", "OrganizationName" => "SEDAC"
+ },
+ "26. +: /Collection/Contacts/Contact[1]" => {
+ "Role" => "ARCHIVER", "OrganizationName" => "SEDAC"
+ },
+ "27. +: /Collection/Contacts/Contact[2]" => {
+ "Role" => "ARCHIVER", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
+ "Address" => {
+ "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
+ }
+ }, "OrganizationPhones" => {
+ "Phone" => [{
+ "Number" => "+1 845-365-8920",
+ "Type" => "Telephone"
+ }, {
+ "Number" => "+1 845-365-8922",
+ "Type" => "Fax"
+ }]
+ }, "OrganizationEmails" => {
+ "Email" => "ciesin.info@ciesin.columbia.edu"
+ }, "ContactPersons" => {
+ "ContactPerson" => {
+ "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services", "JobPosition" => "TECHNICAL CONTACT"
+ }
+ }
+ },
+ "28. -: /Collection/SpatialInfo/SpatialCoverageType" => "Horizontal",
+ "29. +: /Collection/SpatialInfo/SpatialCoverageType" => "HORIZONTAL",
+ "30. -: /Collection/OnlineResources/OnlineResource/Type" => "DOI URL",
+ "31. +: /Collection/OnlineResources/OnlineResource/Type" => "CollectionURL : DATA SET LANDING PAGE",
+ "32. -: /Collection/Spatial/SpatialCoverageType" => "Horizontal",
+ "33. +: /Collection/Spatial/SpatialCoverageType" => "HORIZONTAL",
+ "34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.000000",
+ "35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.0",
+ "36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.000000",
+ "37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.0",
+ "38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.000000",
+ "39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.0",
+ "40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.000000",
+ "41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.0"
+}
+end
+
+def iso_text_report
+ 'application/iso:smap+xml
+
+ 1. -: /DS_Series/schemaLocation
+ 2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName
+ 3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode
+ 4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode
+ 5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription
+ 6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType
+ 7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString
+ 8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString
+ 9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString
+ 10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode
+ 11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id
+ 12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString
+ 13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString
+ 14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString
+ 15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason
+ 16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString
+ 17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+ 18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+ 19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]
+ 20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]
+ 21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]
+ 22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]
+ 23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+ 24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+ 25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString
+ 26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString
+ 27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL
+ 28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode
+ 29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href
+ 30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
+ 31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date'
+end
+
+def echo_text_report
+ 'application/echo10+xml
+
+ 1. -: /Collection/Orderable
+ 2. -: /Collection/Visible
+ 3. -: /Collection/MaintenanceAndUpdateFrequency
+ 4. +: /Collection/Temporal/EndsAtPresentFlag
+ 5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime
+ 6. +: /Collection/Platforms/Platform/ShortName
+ 7. +: /Collection/Platforms/Platform/LongName
+ 8. +: /Collection/Platforms/Platform/Type
+ 9. -: /Collection/AssociatedDIFs/DIF/EntryId
+ 10. -: /Collection/InsertTime
+ 11. +: /Collection/InsertTime
+ 12. -: /Collection/LastUpdate
+ 13. +: /Collection/LastUpdate
+ 14. -: /Collection/LongName
+ 15. +: /Collection/LongName
+ 16. -: /Collection/CollectionState
+ 17. +: /Collection/CollectionState
+ 18. -: /Collection/Price
+ 19. +: /Collection/Price
+ 20. -: /Collection/SpatialKeywords/Keyword[0]
+ 21. -: /Collection/SpatialKeywords/Keyword[1]
+ 22. +: /Collection/SpatialKeywords/Keyword[0]
+ 23. +: /Collection/SpatialKeywords/Keyword[1]
+ 24. -: /Collection/Contacts/Contact[0]
+ 25. +: /Collection/Contacts/Contact[0]
+ 26. +: /Collection/Contacts/Contact[1]
+ 27. +: /Collection/Contacts/Contact[2]
+ 28. -: /Collection/SpatialInfo/SpatialCoverageType
+ 29. +: /Collection/SpatialInfo/SpatialCoverageType
+ 30. -: /Collection/OnlineResources/OnlineResource/Type
+ 31. +: /Collection/OnlineResources/OnlineResource/Type
+ 32. -: /Collection/Spatial/SpatialCoverageType
+ 33. +: /Collection/Spatial/SpatialCoverageType
+ 34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+ 35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+ 36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+ 37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+ 38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+ 39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+ 40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
+ 41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate'
+end
+
+def dif_text_report
+ 'application/dif10+xml
+
+ 1. -: /DIF/Temporal_Coverage/Temporal_Range_Type
+ 2. -: /DIF/Related_URL[1]
+ 3. +: /DIF/Related_URL[1]'
+end
diff --git a/spec/features/collections/loss_report_spec.rb b/spec/features/collections/loss_report_spec.rb
new file mode 100644
index 000000000..14e503ff3
--- /dev/null
+++ b/spec/features/collections/loss_report_spec.rb
@@ -0,0 +1,41 @@
+require 'rails_helper'
+
+describe 'Displaying the comparison report in browser', js: true do
+
+ context 'when accessing the comparison report' do
+
+ before do
+ login
+ end
+
+ context 'when displaying json' do
+ it 'properly displays the echo json report' do
+ visit loss_report_collections_path(echo_id, format:'json')
+ expect(page).to have_content('application/echo')
+ end
+ it 'properly displays the iso json report' do
+ visit loss_report_collections_path(iso_id, format:'json')
+ expect(page).to have_content('application/iso')
+ end
+ it 'properly displays the dif json report' do
+ visit loss_report_collections_path(dif_id, format:'json')
+ expect(page).to have_content('application/dif')
+ end
+ end
+
+    context 'when displaying text' do
+      it 'properly displays the echo text report' do
+        visit loss_report_collections_path(echo_id, format: 'text')
+        expect(page).to have_content('application/echo')
+      end
+      it 'properly displays the iso text report' do
+        visit loss_report_collections_path(iso_id, format: 'text')
+        expect(page).to have_content('application/iso')
+      end
+      it 'properly displays the dif text report' do
+        visit loss_report_collections_path(dif_id, format: 'text')
+        expect(page).to have_content('application/dif')
+      end
+    end
+ end
+end
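
These feature examples only substring-match the format banner, which keeps them resilient to fixture churn. If a stricter check were wanted, one hedged option is to parse the payload itself, assuming the endpoint renders the bare JSON report as the response body (an assumption this patch does not verify):

    it 'returns the full echo json report' do
      visit loss_report_collections_path(echo_id, format: 'json')
      # Hypothetical: with a raw JSON response, the rendered page text is the payload.
      report = JSON.parse(page.text)
      expect(report['format']).to eq('application/echo10+xml')
    end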
diff --git a/spec/helpers/loss_report_helper_spec.rb b/spec/helpers/loss_report_helper_spec.rb
new file mode 100644
index 000000000..a48655938
--- /dev/null
+++ b/spec/helpers/loss_report_helper_spec.rb
@@ -0,0 +1,49 @@
+require 'rails_helper'
+
+describe 'Loss Report Helper', js: true do
+ let(:umm_c_version) { '1.15.3' }
+
+ context '#prepare_collections' do
+ context 'when using cmr endpoints' do
+ it 'successfully retrieves and translates the dif collection' do
+ expect(helper.prepare_collections(dif_id, umm_c_version)).to be_truthy
+ end
+ it 'successfully retrieves and translates the iso collection' do
+ expect(helper.prepare_collections(iso_id, umm_c_version)).to be_truthy
+ end
+ it 'successfully retrieves and translates the echo collection' do
+ expect(helper.prepare_collections(echo_id, umm_c_version)).to be_truthy
+ end
+    end
+  end
+
+  context '#loss_report_output' do
+    context 'when processing a dif collection' do
+      it 'successfully produces a text loss report' do
+        expect(helper.loss_report_output(dif_id).gsub(/\s+/, "")).to eql(dif_text_report.gsub(/\s+/, ""))
+      end
+      it 'successfully produces a json loss report' do
+        # positional arguments: hide_items = false, disp = 'json'
+        expect(helper.loss_report_output(dif_id, false, 'json')).to eql(dif_json_report)
+      end
+    end
+    context 'when processing an echo collection' do
+      it 'successfully produces a text loss report' do
+        expect(helper.loss_report_output(echo_id).gsub(/\s+/, "")).to eql(echo_text_report.gsub(/\s+/, ""))
+      end
+      it 'successfully produces a json loss report' do
+        expect(helper.loss_report_output(echo_id, false, 'json')).to eql(echo_json_report)
+      end
+    end
+    context 'when processing an iso collection' do
+      it 'successfully produces a text loss report' do
+        expect(helper.loss_report_output(iso_id).gsub(/\s+/, "")).to eql(iso_text_report.gsub(/\s+/, ""))
+      end
+      it 'successfully produces a json loss report' do
+        expect(helper.loss_report_output(iso_id, false, 'json')).to have_key("31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date")
+      end
+    end
+    # The iso json example only checks the last key (instead of verifying the full report) because cmr adds/updates an
+    # 'id' attribute in the actual collection every time it is translated, so the comparison report always includes that
+    # change with a different value for the 'id' attribute, and full-hash equality would fail on every run. Checking the
+    # last change is a comparable assertion because it still confirms the json report is generated end-to-end without
+    # depending on the volatile 'id' value.
+  end
+end
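
If full-hash equality is ever wanted for the iso json report, the comment above points at the remedy: normalize away the volatile identifiers before comparing. A rough sketch, with the fragment list and helper name invented for illustration:

    # Hypothetical: drop report entries whose key or value carries an id that
    # CMR regenerates on every translation (over-filters stable gml:ids too).
    VOLATILE_FRAGMENTS = ['/EOS_Platform/id', 'gml:id'].freeze

    def stable_report(report)
      report.reject do |key, value|
        VOLATILE_FRAGMENTS.any? { |fragment| key.include?(fragment) || value.inspect.include?(fragment) }
      end
    end

    it 'produces the expected iso json report, ignoring volatile ids' do
      expect(stable_report(helper.loss_report_output(iso_id, false, 'json')))
        .to eql(stable_report(iso_json_report))
    end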
From 20fd9e2966342ea347685800948ac7bead2beea6 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Wed, 29 Jul 2020 14:44:31 -0400
Subject: [PATCH 36/49] MMT-2313: moved loss_report_samples_helper.rb and added
it to the rails_helper.rb
---
spec/factories/loss_report_factory_data.rb | 831 --------------------
spec/rails_helper.rb | 2 +
spec/support/loss_report_samples_helper.rb | 834 +++++++++++++++++++++
3 files changed, 836 insertions(+), 831 deletions(-)
delete mode 100644 spec/factories/loss_report_factory_data.rb
create mode 100644 spec/support/loss_report_samples_helper.rb
diff --git a/spec/factories/loss_report_factory_data.rb b/spec/factories/loss_report_factory_data.rb
deleted file mode 100644
index 517da5094..000000000
--- a/spec/factories/loss_report_factory_data.rb
+++ /dev/null
@@ -1,831 +0,0 @@
-
-def dif_id
- 'C1200000031-SEDAC'
-end
-
-def iso_id
- 'C1200000089-LARC'
-end
-
-def echo_id
- 'C1200000040-SEDAC'
-end
-
-def iso_json_report
- {
- "format" => "application/iso:smap+xml",
- "1. -: /DS_Series/schemaLocation" => "http://www.isotc211.org/2005/gmi http://cdn.earthdata.nasa.gov/iso/schema/1.0/ISO19115-2_EOS.xsd",
- "2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName" => "L4_SM_aup",
- "3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode" => "utf8",
- "4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode" => "series",
- "5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription" => nil,
- "6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType" => nil,
- "7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString" => "Not provided",
- "8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.processinglevelid",
- "9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString" => "ISO 19115-2 Geographic information - Metadata - Part 2: Extensions for imagery and gridded data",
- "10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode" => "series",
- "11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id" => "dba588298-ef6b-4e0f-9092-d1bfe87001ea",
- "12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString" => "Not provided",
- "13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.platformshortname",
- "14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString" => "Not provided",
- "15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason" => "inapplicable",
- "16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString" => "ISO 19115-2:2009-02-15",
- "17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "Date" => "2016-04-29"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, "edition" => {
- "CharacterString" => "Vv2010"
- }, "identifier" => [{
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }, "codeSpace" => {
- "CharacterString" => "http://gmao.gsfc.nasa.gov"
- }, "description" => {
- "CharacterString" => "The ECS Short Name"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "002"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis"
- }, "description" => {
- "CharacterString" => "The ECS Version ID"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "Anchor" => "doi:10.5067/JJY2V0GJNFRZ"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis"
- }, "description" => {
- "CharacterString" => "A Digital Object Identifier (DOI) that provides a persistent interoperable means to locate the SMAP Level 4 Radar data product."
- }
- }
- }], "citedResponsibleParty" => [{
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "National Aeronautics and Space Administration"
- }, "role" => {
- "CI_RoleCode" => "resourceProvider"
- }
- }
- }, {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "Global Modeling and Assimilation Office"
- }, "role" => {
- "CI_RoleCode" => "originator"
- }
- }
- }], "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }, "otherCitationDetails" => {
- "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "purpose" => {
- "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
- }, "credit" => {
- "CharacterString" => "The software that generates the L4_SM data product and the data system that automates its production were designed and implemented at the NASA Global Modeling and Assimilation Office, Goddard Space Flight Center, Greenbelt, Maryland, USA."
- }, "status" => {
- "MD_ProgressCode" => "onGoing"
- }, "pointOfContact" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "PVC"
- }, "role" => {
- "CI_RoleCode" => "distributor"
- }
- }
- }, "resourceMaintenance" => {
- "MD_MaintenanceInformation" => {
- "maintenanceAndUpdateFrequency" => {
- "MD_MaintenanceFrequencyCode" => "As Needed"
- }, "dateOfNextUpdate" => {
- "Date" => "2016-11-01"
- }, "updateScope" => {
- "MD_ScopeCode" => "series"
- }
- }
- }, "resourceFormat" => {
- "MD_Format" => {
- "name" => {
- "CharacterString" => "HDF5"
- }, "version" => {
- "CharacterString" => "Version 1.8.9"
- }
- }
- }, "descriptiveKeywords" => [{
- "MD_Keywords" => {
- "keyword" => [{
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
- }], "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Earth Remote Sensing Instruments > Active Remote Sensing > NONE > SMAP L-BAND RADAR > SMAP L-Band Radar"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Earth Observation Satellites > NASA Decadal Survey > SMAP > Soil Moisture Active and Passive Observatory"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "GEOGRAPHIC REGION > GLOBAL"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }], "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP"
- }
- }
- }, "associationType" => {
- "DS_AssociationTypeCode" => "largerWorkCitation"
- }, "initiativeType" => {
- "DS_InitiativeTypeCode" => "mission"
- }
- }
- }, "language" => {
- "CharacterString" => "eng"
- }, "characterSet" => {
- "MD_CharacterSetCode" => "utf8"
- }, "topicCategory" => {
- "MD_TopicCategoryCode" => "geoscientificInformation"
- }, "environmentDescription" => {
- "CharacterString" => "Data product generated by the SMAP mission in HDF5 format with metadata that conforms to the ISO 19115 model."
- }, "extent" => {
- "EX_Extent" => {
- "description" => {
- "CharacterString" => "Global land excluding inland water and permanent ice."
- }, "geographicElement" => {
- "EX_GeographicBoundingBox" => {
- "extentTypeCode" => {
- "Boolean" => "1"
- }, "westBoundLongitude" => {
- "Decimal" => "-180"
- }, "eastBoundLongitude" => {
- "Decimal" => "180"
- }, "southBoundLatitude" => {
- "Decimal" => "-85.04456"
- }, "northBoundLatitude" => {
- "Decimal" => "85.04456"
- }
- }
- }, "temporalElement" => {
- "EX_TemporalExtent" => {
- "extent" => {
- "TimePeriod" => {
- "gml:id" => "swathTemporalExtent", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
- }
- }
- }
- }
- }
- }
- }
- },
- "18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "Soil Moisture Active Passive Mission Level 4 Surface and Root Zone Soil Moisture (L4_SM) Product Specification Document"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "Date" => "2015-10-31"
- }, "dateType" => {
- "CI_DateTypeCode" => "publication"
- }
- }
- }, "edition" => {
- "CharacterString" => "1.4"
- }, "identifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "L4_SM"
- }, "codeSpace" => {
- "CharacterString" => "http://gmao.gsfc.nasa.gov"
- }, "description" => {
- "CharacterString" => "A short name used by the Soil Moisture Active Passive (SMAP) mission to identify the Level 4 Radar product."
- }
- }
- }, "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DataSetId"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DataSetId"
- }, "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
- }
- }
- }, "associationType" => nil
- }
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "InsertTime"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "InsertTime"
- }, "purpose" => {
- "CharacterString" => "InsertTime"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "UpdateTime"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "UpdateTime"
- }, "purpose" => {
- "CharacterString" => "UpdateTime"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DIFID"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, "identifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DIFID"
- }, "purpose" => {
- "CharacterString" => "DIFID"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
- }, "date" => [{
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-04-29T00:00:00.000Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }], "edition" => {
- "CharacterString" => "Vv2010"
- }, "identifier" => [{
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }, "description" => {
- "CharacterString" => "The ECS Short Name"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "002"
- }, "description" => {
- "CharacterString" => "The ECS Version ID"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "doi:10.5067/JJY2V0GJNFRZ"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis.umm.doi"
- }, "description" => {
- "CharacterString" => "DOI"
- }
- }
- }], "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }, "otherCitationDetails" => {
- "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "purpose" => {
- "gco:nilReason" => "missing", "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
- }, "status" => {
- "MD_ProgressCode" => "onGoing"
- }, "pointOfContact" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "PVC"
- }, "role" => {
- "CI_RoleCode" => "distributor"
- }
- }
- }, "descriptiveKeywords" => [{
- "MD_Keywords" => {
- "keyword" => [{
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > NONE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
- }], "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "gco:nilReason" => "unknown"
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Aircraft > Not provided > Not provided > "
- }
- }
- }], "language" => {
- "CharacterString" => "eng"
- }, "topicCategory" => {
- "MD_TopicCategoryCode" => "geoscientificInformation"
- }, "extent" => {
- "EX_Extent" => {
- "geographicElement" => {
- "EX_GeographicBoundingBox" => {
- "extentTypeCode" => {
- "Boolean" => "1"
- }, "westBoundLongitude" => {
- "Decimal" => "-180.0"
- }, "eastBoundLongitude" => {
- "Decimal" => "180.0"
- }, "southBoundLatitude" => {
- "Decimal" => "-85.04456"
- }, "northBoundLatitude" => {
- "Decimal" => "85.04456"
- }
- }
- }, "temporalElement" => {
- "EX_TemporalExtent" => {
- "extent" => {
- "TimePeriod" => {
- "gml:id" => "dc46625fa-ae1e-4c95-a6ae-b15dd90fe8d3", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
- }
- }
- }
- }
- }
- }, "processingLevel" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "Not provided"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis.umm.processinglevelid"
- }
- }
- }
- }
- },
- "24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DataSetId"
- }, "date" => [{
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-04-29T00:00:00.000Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }], "citedResponsibleParty" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "Global Modeling and Assimilation Office"
- }, "role" => {
- "CI_RoleCode" => "originator"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DataSetId"
- }, "resourceFormat" => {
- "MD_Format" => {
- "name" => {
- "CharacterString" => "HDF5"
- }, "version" => {
- "gco:nilReason" => "unknown"
- }
- }
- }, "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
- }
- }
- }, "associationType" => {
- "DS_AssociationTypeCode" => "largerWorkCitation"
- }
- }
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC", "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org", "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/", "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact", "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact", "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29", "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
- }
-end
-
-def dif_json_report
- {
- "format" => "application/dif10+xml",
- "1. -: /DIF/Temporal_Coverage/Temporal_Range_Type" => "Long Range",
- "2. -: /DIF/Related_URL[1]" => {
- "URL_Content_Type" => {
- "Type" => "VIEW DATA SET LANDING PAGE"
- }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
- },
- "3. +: /DIF/Related_URL[1]" => {
- "URL_Content_Type" => {
- "Type" => "DATA SET LANDING PAGE"
- }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
- }
-}
-end
-
-def echo_json_report
- {
- "format" => "application/echo10+xml",
- "1. -: /Collection/Orderable" => "true",
- "2. -: /Collection/Visible" => "true",
- "3. -: /Collection/MaintenanceAndUpdateFrequency" => "As needed",
- "4. +: /Collection/Temporal/EndsAtPresentFlag" => "false",
- "5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime" => "1970-01-01T00:00:00.000Z",
- "6. +: /Collection/Platforms/Platform/ShortName" => "Not provided",
- "7. +: /Collection/Platforms/Platform/LongName" => "Not provided",
- "8. +: /Collection/Platforms/Platform/Type" => "Not provided",
- "9. -: /Collection/AssociatedDIFs/DIF/EntryId" => "CIESIN_SEDAC_ANTHROMES_v2_1700",
- "10. -: /Collection/InsertTime" => "2014-05-13T00:00:00Z",
- "11. +: /Collection/InsertTime" => "2014-05-13T00:00:00.000Z",
- "12. -: /Collection/LastUpdate" => "2015-08-04T00:00:00Z",
- "13. +: /Collection/LastUpdate" => "2015-08-04T00:00:00.000Z",
- "14. -: /Collection/LongName" => "Anthropogenic Biomes of the World, Version 2: 1700",
- "15. +: /Collection/LongName" => "Not provided",
- "16. -: /Collection/CollectionState" => "Final",
- "17. +: /Collection/CollectionState" => "NOT PROVIDED",
- "18. -: /Collection/Price" => "0",
- "19. +: /Collection/Price" => " 0.00",
- "20. -: /Collection/SpatialKeywords/Keyword[0]" => "Africa",
- "21. -: /Collection/SpatialKeywords/Keyword[1]" => "Asia",
- "22. +: /Collection/SpatialKeywords/Keyword[0]" => "AFRICA",
- "23. +: /Collection/SpatialKeywords/Keyword[1]" => "GAZA STRIP",
- "24. -: /Collection/Contacts/Contact[0]" => {
- "Role" => "Archive", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
- "Address" => {
- "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
- }
- }, "OrganizationPhones" => {
- "Phone" => [{
- "Number" => "+1 845-365-8920",
- "Type" => "Telephone"
- }, {
- "Number" => "+1 845-365-8922",
- "Type" => "Fax"
- }]
- }, "OrganizationEmails" => {
- "Email" => "ciesin.info@ciesin.columbia.edu"
- }, "ContactPersons" => {
- "ContactPerson" => {
- "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services"
- }
- }
- },
- "25. +: /Collection/Contacts/Contact[0]" => {
- "Role" => "PROCESSOR", "OrganizationName" => "SEDAC"
- },
- "26. +: /Collection/Contacts/Contact[1]" => {
- "Role" => "ARCHIVER", "OrganizationName" => "SEDAC"
- },
- "27. +: /Collection/Contacts/Contact[2]" => {
- "Role" => "ARCHIVER", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
- "Address" => {
- "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
- }
- }, "OrganizationPhones" => {
- "Phone" => [{
- "Number" => "+1 845-365-8920",
- "Type" => "Telephone"
- }, {
- "Number" => "+1 845-365-8922",
- "Type" => "Fax"
- }]
- }, "OrganizationEmails" => {
- "Email" => "ciesin.info@ciesin.columbia.edu"
- }, "ContactPersons" => {
- "ContactPerson" => {
- "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services", "JobPosition" => "TECHNICAL CONTACT"
- }
- }
- },
- "28. -: /Collection/SpatialInfo/SpatialCoverageType" => "Horizontal",
- "29. +: /Collection/SpatialInfo/SpatialCoverageType" => "HORIZONTAL",
- "30. -: /Collection/OnlineResources/OnlineResource/Type" => "DOI URL",
- "31. +: /Collection/OnlineResources/OnlineResource/Type" => "CollectionURL : DATA SET LANDING PAGE",
- "32. -: /Collection/Spatial/SpatialCoverageType" => "Horizontal",
- "33. +: /Collection/Spatial/SpatialCoverageType" => "HORIZONTAL",
- "34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.000000",
- "35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.0",
- "36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.000000",
- "37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.0",
- "38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.000000",
- "39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.0",
- "40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.000000",
- "41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.0"
-}
-end
-
-def iso_text_report
- 'application/iso:smap+xml
-
- 1. -: /DS_Series/schemaLocation
- 2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName
- 3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode
- 4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode
- 5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription
- 6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType
- 7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString
- 8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString
- 9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString
- 10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode
- 11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id
- 12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString
- 13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString
- 14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString
- 15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason
- 16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString
- 17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
- 18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
- 19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]
- 20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]
- 21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]
- 22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]
- 23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
- 24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
- 25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString
- 26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString
- 27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL
- 28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode
- 29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href
- 30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
- 31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date'
-end
-
-def echo_text_report
- 'application/echo10+xml
-
- 1. -: /Collection/Orderable
- 2. -: /Collection/Visible
- 3. -: /Collection/MaintenanceAndUpdateFrequency
- 4. +: /Collection/Temporal/EndsAtPresentFlag
- 5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime
- 6. +: /Collection/Platforms/Platform/ShortName
- 7. +: /Collection/Platforms/Platform/LongName
- 8. +: /Collection/Platforms/Platform/Type
- 9. -: /Collection/AssociatedDIFs/DIF/EntryId
- 10. -: /Collection/InsertTime
- 11. +: /Collection/InsertTime
- 12. -: /Collection/LastUpdate
- 13. +: /Collection/LastUpdate
- 14. -: /Collection/LongName
- 15. +: /Collection/LongName
- 16. -: /Collection/CollectionState
- 17. +: /Collection/CollectionState
- 18. -: /Collection/Price
- 19. +: /Collection/Price
- 20. -: /Collection/SpatialKeywords/Keyword[0]
- 21. -: /Collection/SpatialKeywords/Keyword[1]
- 22. +: /Collection/SpatialKeywords/Keyword[0]
- 23. +: /Collection/SpatialKeywords/Keyword[1]
- 24. -: /Collection/Contacts/Contact[0]
- 25. +: /Collection/Contacts/Contact[0]
- 26. +: /Collection/Contacts/Contact[1]
- 27. +: /Collection/Contacts/Contact[2]
- 28. -: /Collection/SpatialInfo/SpatialCoverageType
- 29. +: /Collection/SpatialInfo/SpatialCoverageType
- 30. -: /Collection/OnlineResources/OnlineResource/Type
- 31. +: /Collection/OnlineResources/OnlineResource/Type
- 32. -: /Collection/Spatial/SpatialCoverageType
- 33. +: /Collection/Spatial/SpatialCoverageType
- 34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
- 35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
- 36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
- 37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
- 38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
- 39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
- 40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
- 41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate'
-end
-
-def dif_text_report
- 'application/dif10+xml
-
- 1. -: /DIF/Temporal_Coverage/Temporal_Range_Type
- 2. -: /DIF/Related_URL[1]
- 3. +: /DIF/Related_URL[1]'
-end
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index 91abc19b6..8a1e89ccb 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -189,6 +189,7 @@
config.include Helpers::GroupHelper
config.include Helpers::IngestHelpers
config.include Helpers::Instrumentation
+ config.include Helpers::LossReportSamplesHelper
config.include Helpers::ProposalStatusHelper
config.include Helpers::SearchHelpers
config.include Helpers::SubscriptionHelpers
@@ -196,6 +197,7 @@
config.include Helpers::UmmTDraftHelpers
config.include Helpers::UserHelpers
+
# Precompile assets before running the test suite
# config.before(:suite) do
# Rails.application.load_tasks
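
The config.include above is what the move in this commit enables: the methods in spec/factories were plain top-level definitions, while a module under spec/support can be mixed into every example group. This relies on the conventional support-file auto-require in rails_helper.rb, sketched below from the standard rspec-rails idiom (MMT's exact wiring may differ slightly):

    # Conventional spec/support auto-require from the rspec-rails generator.
    Dir[Rails.root.join('spec', 'support', '**', '*.rb')].sort.each { |file| require file }

    RSpec.configure do |config|
      # Makes dif_id, iso_json_report, etc. available inside every example.
      config.include Helpers::LossReportSamplesHelper
    end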
diff --git a/spec/support/loss_report_samples_helper.rb b/spec/support/loss_report_samples_helper.rb
new file mode 100644
index 000000000..46865f046
--- /dev/null
+++ b/spec/support/loss_report_samples_helper.rb
@@ -0,0 +1,834 @@
+module Helpers
+ module LossReportSamplesHelper
+ def dif_id
+ 'C1200000031-SEDAC'
+ end
+
+ def iso_id
+ 'C1200000089-LARC'
+ end
+
+ def echo_id
+ 'C1200000040-SEDAC'
+ end
+
+ def iso_json_report
+ {
+ "format" => "application/iso:smap+xml",
+ "1. -: /DS_Series/schemaLocation" => "http://www.isotc211.org/2005/gmi http://cdn.earthdata.nasa.gov/iso/schema/1.0/ISO19115-2_EOS.xsd",
+ "2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName" => "L4_SM_aup",
+ "3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode" => "utf8",
+ "4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode" => "series",
+ "5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription" => nil,
+ "6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType" => nil,
+ "7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString" => "Not provided",
+ "8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.processinglevelid",
+ "9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString" => "ISO 19115-2 Geographic information - Metadata - Part 2: Extensions for imagery and gridded data",
+ "10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode" => "series",
+ "11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id" => "dba588298-ef6b-4e0f-9092-d1bfe87001ea",
+ "12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString" => "Not provided",
+ "13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.platformshortname",
+ "14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString" => "Not provided",
+ "15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason" => "inapplicable",
+ "16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString" => "ISO 19115-2:2009-02-15",
+ "17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "Date" => "2016-04-29"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, "edition" => {
+ "CharacterString" => "Vv2010"
+ }, "identifier" => [{
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }, "codeSpace" => {
+ "CharacterString" => "http://gmao.gsfc.nasa.gov"
+ }, "description" => {
+ "CharacterString" => "The ECS Short Name"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "002"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis"
+ }, "description" => {
+ "CharacterString" => "The ECS Version ID"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "Anchor" => "doi:10.5067/JJY2V0GJNFRZ"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis"
+ }, "description" => {
+ "CharacterString" => "A Digital Object Identifier (DOI) that provides a persistent interoperable means to locate the SMAP Level 4 Radar data product."
+ }
+ }
+ }], "citedResponsibleParty" => [{
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "National Aeronautics and Space Administration"
+ }, "role" => {
+ "CI_RoleCode" => "resourceProvider"
+ }
+ }
+ }, {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "Global Modeling and Assimilation Office"
+ }, "role" => {
+ "CI_RoleCode" => "originator"
+ }
+ }
+ }], "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }, "otherCitationDetails" => {
+ "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "purpose" => {
+ "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ }, "credit" => {
+ "CharacterString" => "The software that generates the L4_SM data product and the data system that automates its production were designed and implemented at the NASA Global Modeling and Assimilation Office, Goddard Space Flight Center, Greenbelt, Maryland, USA."
+ }, "status" => {
+ "MD_ProgressCode" => "onGoing"
+ }, "pointOfContact" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "PVC"
+ }, "role" => {
+ "CI_RoleCode" => "distributor"
+ }
+ }
+ }, "resourceMaintenance" => {
+ "MD_MaintenanceInformation" => {
+ "maintenanceAndUpdateFrequency" => {
+ "MD_MaintenanceFrequencyCode" => "As Needed"
+ }, "dateOfNextUpdate" => {
+ "Date" => "2016-11-01"
+ }, "updateScope" => {
+ "MD_ScopeCode" => "series"
+ }
+ }
+ }, "resourceFormat" => {
+ "MD_Format" => {
+ "name" => {
+ "CharacterString" => "HDF5"
+ }, "version" => {
+ "CharacterString" => "Version 1.8.9"
+ }
+ }
+ }, "descriptiveKeywords" => [{
+ "MD_Keywords" => {
+ "keyword" => [{
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }], "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Earth Remote Sensing Instruments > Active Remote Sensing > NONE > SMAP L-BAND RADAR > SMAP L-Band Radar"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Earth Observation Satellites > NASA Decadal Survey > SMAP > Soil Moisture Active and Passive Observatory"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "GEOGRAPHIC REGION > GLOBAL"
+ }, "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "NASA/GCMD Earth Science Keywords"
+ }, "date" => {
+ "gco:nilReason" => "missing"
+ }
+ }
+ }
+ }
+ }], "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP"
+ }
+ }
+ }, "associationType" => {
+ "DS_AssociationTypeCode" => "largerWorkCitation"
+ }, "initiativeType" => {
+ "DS_InitiativeTypeCode" => "mission"
+ }
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }, "characterSet" => {
+ "MD_CharacterSetCode" => "utf8"
+ }, "topicCategory" => {
+ "MD_TopicCategoryCode" => "geoscientificInformation"
+ }, "environmentDescription" => {
+ "CharacterString" => "Data product generated by the SMAP mission in HDF5 format with metadata that conforms to the ISO 19115 model."
+ }, "extent" => {
+ "EX_Extent" => {
+ "description" => {
+ "CharacterString" => "Global land excluding inland water and permanent ice."
+ }, "geographicElement" => {
+ "EX_GeographicBoundingBox" => {
+ "extentTypeCode" => {
+ "Boolean" => "1"
+ }, "westBoundLongitude" => {
+ "Decimal" => "-180"
+ }, "eastBoundLongitude" => {
+ "Decimal" => "180"
+ }, "southBoundLatitude" => {
+ "Decimal" => "-85.04456"
+ }, "northBoundLatitude" => {
+ "Decimal" => "85.04456"
+ }
+ }
+ }, "temporalElement" => {
+ "EX_TemporalExtent" => {
+ "extent" => {
+ "TimePeriod" => {
+ "gml:id" => "swathTemporalExtent", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "Soil Moisture Active Passive Mission Level 4 Surface and Root Zone Soil Moisture (L4_SM) Product Specification Document"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "Date" => "2015-10-31"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "publication"
+ }
+ }
+ }, "edition" => {
+ "CharacterString" => "1.4"
+ }, "identifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "L4_SM"
+ }, "codeSpace" => {
+ "CharacterString" => "http://gmao.gsfc.nasa.gov"
+ }, "description" => {
+ "CharacterString" => "A short name used by the Soil Moisture Active Passive (SMAP) mission to identify the Level 4 Radar product."
+ }
+ }
+ }, "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DataSetId"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DataSetId"
+ }, "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ }, "associationType" => nil
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "InsertTime"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "InsertTime"
+ }, "purpose" => {
+ "CharacterString" => "InsertTime"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "UpdateTime"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "UpdateTime"
+ }, "purpose" => {
+ "CharacterString" => "UpdateTime"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DIFID"
+ }, "date" => {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, "identifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DIFID"
+ }, "purpose" => {
+ "CharacterString" => "DIFID"
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+ "23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ }, "date" => [{
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-04-29T00:00:00.000Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }], "edition" => {
+ "CharacterString" => "Vv2010"
+ }, "identifier" => [{
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SPL4SMAU"
+ }, "description" => {
+ "CharacterString" => "The ECS Short Name"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "002"
+ }, "description" => {
+ "CharacterString" => "The ECS Version ID"
+ }
+ }
+ }, {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "doi:10.5067/JJY2V0GJNFRZ"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis.umm.doi"
+ }, "description" => {
+ "CharacterString" => "DOI"
+ }
+ }
+ }], "presentationForm" => {
+ "CI_PresentationFormCode" => "documentDigital"
+ }, "series" => {
+ "CI_Series" => {
+ "name" => {
+ "CharacterString" => "L4_SM"
+ }
+ }
+ }, "otherCitationDetails" => {
+ "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ }, "purpose" => {
+ "gco:nilReason" => "missing", "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ }, "status" => {
+ "MD_ProgressCode" => "onGoing"
+ }, "pointOfContact" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "PVC"
+ }, "role" => {
+ "CI_RoleCode" => "distributor"
+ }
+ }
+ }, "descriptiveKeywords" => [{
+ "MD_Keywords" => {
+ "keyword" => [{
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > NONE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ }, {
+ "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }], "type" => {
+ "MD_KeywordTypeCode" => "theme"
+ }, "thesaurusName" => {
+ "gco:nilReason" => "unknown"
+ }
+ }
+ }, {
+ "MD_Keywords" => {
+ "keyword" => {
+ "CharacterString" => "Aircraft > Not provided > Not provided > "
+ }
+ }
+ }], "language" => {
+ "CharacterString" => "eng"
+ }, "topicCategory" => {
+ "MD_TopicCategoryCode" => "geoscientificInformation"
+ }, "extent" => {
+ "EX_Extent" => {
+ "geographicElement" => {
+ "EX_GeographicBoundingBox" => {
+ "extentTypeCode" => {
+ "Boolean" => "1"
+ }, "westBoundLongitude" => {
+ "Decimal" => "-180.0"
+ }, "eastBoundLongitude" => {
+ "Decimal" => "180.0"
+ }, "southBoundLatitude" => {
+ "Decimal" => "-85.04456"
+ }, "northBoundLatitude" => {
+ "Decimal" => "85.04456"
+ }
+ }
+ }, "temporalElement" => {
+ "EX_TemporalExtent" => {
+ "extent" => {
+ "TimePeriod" => {
+ "gml:id" => "dc46625fa-ae1e-4c95-a6ae-b15dd90fe8d3", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ }, "processingLevel" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "Not provided"
+ }, "codeSpace" => {
+ "CharacterString" => "gov.nasa.esdis.umm.processinglevelid"
+ }
+ }
+ }
+ }
+ },
+ "24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
+ "MD_DataIdentification" => {
+ "citation" => {
+ "CI_Citation" => {
+ "title" => {
+ "CharacterString" => "DataSetId"
+ }, "date" => [{
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-04-29T00:00:00.000Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-12T11:50:19.050Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "revision"
+ }
+ }
+ }, {
+ "CI_Date" => {
+ "date" => {
+ "DateTime" => "2016-09-08T09:16:24.835Z"
+ }, "dateType" => {
+ "CI_DateTypeCode" => "creation"
+ }
+ }
+ }], "citedResponsibleParty" => {
+ "CI_ResponsibleParty" => {
+ "organisationName" => {
+ "CharacterString" => "Global Modeling and Assimilation Office"
+ }, "role" => {
+ "CI_RoleCode" => "originator"
+ }
+ }
+ }
+ }
+ }, "abstract" => {
+ "CharacterString" => "DataSetId"
+ }, "resourceFormat" => {
+ "MD_Format" => {
+ "name" => {
+ "CharacterString" => "HDF5"
+ }, "version" => {
+ "gco:nilReason" => "unknown"
+ }
+ }
+ }, "aggregationInfo" => {
+ "MD_AggregateInformation" => {
+ "aggregateDataSetIdentifier" => {
+ "MD_Identifier" => {
+ "code" => {
+ "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ }, "associationType" => {
+ "DS_AssociationTypeCode" => "largerWorkCitation"
+ }
+ }
+ }, "language" => {
+ "CharacterString" => "eng"
+ }
+ }
+ },
+        "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC",
+        "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org",
+        "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/",
+        "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact",
+        "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact",
+        "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29",
+        "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
+ }
+ end
+
+ def dif_json_report
+ {
+ "format" => "application/dif10+xml",
+ "1. -: /DIF/Temporal_Coverage/Temporal_Range_Type" => "Long Range",
+ "2. -: /DIF/Related_URL[1]" => {
+ "URL_Content_Type" => {
+ "Type" => "VIEW DATA SET LANDING PAGE"
+ }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
+ },
+ "3. +: /DIF/Related_URL[1]" => {
+ "URL_Content_Type" => {
+ "Type" => "DATA SET LANDING PAGE"
+ }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
+ }
+ }
+ end
+
+ def echo_json_report
+ {
+ "format" => "application/echo10+xml",
+ "1. -: /Collection/Orderable" => "true",
+ "2. -: /Collection/Visible" => "true",
+ "3. -: /Collection/MaintenanceAndUpdateFrequency" => "As needed",
+ "4. +: /Collection/Temporal/EndsAtPresentFlag" => "false",
+ "5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime" => "1970-01-01T00:00:00.000Z",
+ "6. +: /Collection/Platforms/Platform/ShortName" => "Not provided",
+ "7. +: /Collection/Platforms/Platform/LongName" => "Not provided",
+ "8. +: /Collection/Platforms/Platform/Type" => "Not provided",
+ "9. -: /Collection/AssociatedDIFs/DIF/EntryId" => "CIESIN_SEDAC_ANTHROMES_v2_1700",
+ "10. -: /Collection/InsertTime" => "2014-05-13T00:00:00Z",
+ "11. +: /Collection/InsertTime" => "2014-05-13T00:00:00.000Z",
+ "12. -: /Collection/LastUpdate" => "2015-08-04T00:00:00Z",
+ "13. +: /Collection/LastUpdate" => "2015-08-04T00:00:00.000Z",
+ "14. -: /Collection/LongName" => "Anthropogenic Biomes of the World, Version 2: 1700",
+ "15. +: /Collection/LongName" => "Not provided",
+ "16. -: /Collection/CollectionState" => "Final",
+ "17. +: /Collection/CollectionState" => "NOT PROVIDED",
+ "18. -: /Collection/Price" => "0",
+ "19. +: /Collection/Price" => " 0.00",
+ "20. -: /Collection/SpatialKeywords/Keyword[0]" => "Africa",
+ "21. -: /Collection/SpatialKeywords/Keyword[1]" => "Asia",
+ "22. +: /Collection/SpatialKeywords/Keyword[0]" => "AFRICA",
+ "23. +: /Collection/SpatialKeywords/Keyword[1]" => "GAZA STRIP",
+ "24. -: /Collection/Contacts/Contact[0]" => {
+ "Role" => "Archive", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
+ "Address" => {
+ "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
+ }
+ }, "OrganizationPhones" => {
+ "Phone" => [{
+ "Number" => "+1 845-365-8920",
+ "Type" => "Telephone"
+ }, {
+ "Number" => "+1 845-365-8922",
+ "Type" => "Fax"
+ }]
+ }, "OrganizationEmails" => {
+ "Email" => "ciesin.info@ciesin.columbia.edu"
+ }, "ContactPersons" => {
+ "ContactPerson" => {
+ "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services"
+ }
+ }
+ },
+ "25. +: /Collection/Contacts/Contact[0]" => {
+ "Role" => "PROCESSOR", "OrganizationName" => "SEDAC"
+ },
+ "26. +: /Collection/Contacts/Contact[1]" => {
+ "Role" => "ARCHIVER", "OrganizationName" => "SEDAC"
+ },
+ "27. +: /Collection/Contacts/Contact[2]" => {
+ "Role" => "ARCHIVER", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
+ "Address" => {
+ "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
+ }
+ }, "OrganizationPhones" => {
+ "Phone" => [{
+ "Number" => "+1 845-365-8920",
+ "Type" => "Telephone"
+ }, {
+ "Number" => "+1 845-365-8922",
+ "Type" => "Fax"
+ }]
+ }, "OrganizationEmails" => {
+ "Email" => "ciesin.info@ciesin.columbia.edu"
+ }, "ContactPersons" => {
+ "ContactPerson" => {
+ "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services", "JobPosition" => "TECHNICAL CONTACT"
+ }
+ }
+ },
+ "28. -: /Collection/SpatialInfo/SpatialCoverageType" => "Horizontal",
+ "29. +: /Collection/SpatialInfo/SpatialCoverageType" => "HORIZONTAL",
+ "30. -: /Collection/OnlineResources/OnlineResource/Type" => "DOI URL",
+ "31. +: /Collection/OnlineResources/OnlineResource/Type" => "CollectionURL : DATA SET LANDING PAGE",
+ "32. -: /Collection/Spatial/SpatialCoverageType" => "Horizontal",
+ "33. +: /Collection/Spatial/SpatialCoverageType" => "HORIZONTAL",
+ "34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.000000",
+ "35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.0",
+ "36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.000000",
+ "37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.0",
+ "38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.000000",
+ "39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.0",
+ "40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.000000",
+ "41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.0"
+ }
+ end
+
+ def iso_text_report
+ 'application/iso:smap+xml
+
+ 1. -: /DS_Series/schemaLocation
+ 2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName
+ 3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode
+ 4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode
+ 5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription
+ 6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType
+ 7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString
+ 8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString
+ 9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString
+ 10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode
+ 11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id
+ 12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString
+ 13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString
+ 14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString
+ 15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason
+ 16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString
+ 17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+ 18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+ 19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]
+ 20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]
+ 21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]
+ 22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]
+ 23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+ 24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+ 25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString
+ 26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString
+ 27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL
+ 28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode
+ 29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href
+ 30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
+ 31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date'
+ end
+
+ def echo_text_report
+ 'application/echo10+xml
+
+ 1. -: /Collection/Orderable
+ 2. -: /Collection/Visible
+ 3. -: /Collection/MaintenanceAndUpdateFrequency
+ 4. +: /Collection/Temporal/EndsAtPresentFlag
+ 5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime
+ 6. +: /Collection/Platforms/Platform/ShortName
+ 7. +: /Collection/Platforms/Platform/LongName
+ 8. +: /Collection/Platforms/Platform/Type
+ 9. -: /Collection/AssociatedDIFs/DIF/EntryId
+ 10. -: /Collection/InsertTime
+ 11. +: /Collection/InsertTime
+ 12. -: /Collection/LastUpdate
+ 13. +: /Collection/LastUpdate
+ 14. -: /Collection/LongName
+ 15. +: /Collection/LongName
+ 16. -: /Collection/CollectionState
+ 17. +: /Collection/CollectionState
+ 18. -: /Collection/Price
+ 19. +: /Collection/Price
+ 20. -: /Collection/SpatialKeywords/Keyword[0]
+ 21. -: /Collection/SpatialKeywords/Keyword[1]
+ 22. +: /Collection/SpatialKeywords/Keyword[0]
+ 23. +: /Collection/SpatialKeywords/Keyword[1]
+ 24. -: /Collection/Contacts/Contact[0]
+ 25. +: /Collection/Contacts/Contact[0]
+ 26. +: /Collection/Contacts/Contact[1]
+ 27. +: /Collection/Contacts/Contact[2]
+ 28. -: /Collection/SpatialInfo/SpatialCoverageType
+ 29. +: /Collection/SpatialInfo/SpatialCoverageType
+ 30. -: /Collection/OnlineResources/OnlineResource/Type
+ 31. +: /Collection/OnlineResources/OnlineResource/Type
+ 32. -: /Collection/Spatial/SpatialCoverageType
+ 33. +: /Collection/Spatial/SpatialCoverageType
+ 34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+ 35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+ 36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+ 37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+ 38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+ 39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+ 40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
+ 41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate'
+ end
+
+ def dif_text_report
+ 'application/dif10+xml
+
+ 1. -: /DIF/Temporal_Coverage/Temporal_Range_Type
+ 2. -: /DIF/Related_URL[1]
+ 3. +: /DIF/Related_URL[1]'
+ end
+ end
+end
From 34891ecf57d32ea222a90c1df67f3df8bfb51dd3 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 31 Jul 2020 13:45:59 -0400
Subject: [PATCH 37/49] MMT-2313: made changes per PR change requests
---
app/controllers/collections_controller.rb | 27 ++++++++--
app/helpers/loss_report_helper.rb | 41 +++++----------
app/views/collections/loss_report.html.erb | 51 -------------------
spec/features/collections/loss_report_spec.rb | 3 +-
spec/helpers/loss_report_helper_spec.rb | 13 +++--
spec/support/loss_report_samples_helper.rb | 8 ++-
6 files changed, 53 insertions(+), 90 deletions(-)
delete mode 100644 app/views/collections/loss_report.html.erb
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 78e9cfaf5..f0c884d5a 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -5,6 +5,7 @@ class CollectionsController < ManageCollectionsController
include LossReportHelper
before_action :set_collection
+ before_action :prepare_translated_collections
before_action :ensure_correct_collection_provider, only: [:edit, :clone, :revert, :destroy]
layout 'collection_preview', only: [:show]
@@ -120,10 +121,9 @@ def loss_report
# When a user wants to use MMT to edit metadata that currently exists in a non-UMM form,
# it's important that they're able to see if any data loss occurs in the translation to umm.
# This method is needed to reference the appropriate helper and view for the lossiness report
- concept_id = params[:id]
respond_to do |format|
- format.text { render plain: loss_report_output(concept_id, hide_items=true, disp='text') }
- format.json { render json: JSON.pretty_generate(loss_report_output(concept_id, hide_items=false, disp='json')) }
+ format.text { render plain: loss_report_output(hide_items=true, disp='text') }
+ format.json { render json: JSON.pretty_generate(loss_report_output(hide_items=false, disp='json')) }
end
end
@@ -139,6 +139,27 @@ def ensure_correct_collection_provider
render :show
end
+ def prepare_translated_collections
+ original_collection_native_xml = cmr_client.get_concept(params[:id],token, {})
+ original_collection_native_xml.success? ? @collection_error = false : @collection_error = true
+
+ @content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
+ @collection_error = true if @content_type.include?('application/vnd.nasa.cmr.umm+json;version=')
+
+ @original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
+
+ translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, @content_type, "application/#{Rails.configuration.umm_c_version}; charset=utf-8", skip_validation=true)
+ @collection_error = true if !translated_collection_umm_json.success?
+
+ translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(translated_collection_umm_json.body), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", @content_type, skip_validation=true)
+ @collection_error = true if !translated_collection_native_xml.success?
+
+ @translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
+
+ @original_collection_native_xml = original_collection_native_xml.body
+ @translated_collection_native_xml = translated_collection_native_xml.body
+ end
+
def set_collection
@concept_id = params[:id]
@revision_id = params[:revision_id]
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index e17a00e80..39b3acfdd 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -12,18 +12,23 @@ def token
end
end
- def loss_report_output(concept_id, hide_items=true, disp='text')
+ def loss_report_output(hide_items=true, disp='text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
- # prepare_collections returns false when the cmr_client endpoints are unsuccessfully executed
- if (collections = prepare_collections(concept_id, '1.15.3'))
- orig_xml,conv_xml,orig_h,conv_h,content_type = collections
+ # @collection_error is true if there is an error in the translation that is performed by prepare_collections in the collections_controller
+ if !@collection_error
+ orig_xml,conv_xml = @original_collection_native_xml, @translated_collection_native_xml
+ orig_h,conv_h = @original_collection_native_hash, @translated_collection_native_hash
else
return 'Failure to get_concept or translate_collection' if disp == 'text'
return {"error"=>"Failure to get_concept or translate_collection"} if disp == 'json'
end
- if content_type.include?('iso') || content_type.include?('dif')
+ # ISO and DIF collections (in XML form) contain namespaces that cause errors in the below comparison.
+ # Specifically, when nodes are evaluated individually (their namespace definitions remain at the top of the xml),
+ # their prefixes are undefined in the scope of the evaluation and therefore raise errors. Removing the namespaces
+ # eliminates this issue.
+ if @content_type.include?('iso') || @content_type.include?('dif')
orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
else
@@ -31,11 +36,6 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
end
- #write files to test that all changes are being found with opendiff
- dir = '/Users/ctrummer/Documents/devtesting'
- File.write("#{dir}/o_#{concept_id}.xml", orig.to_xml)
- File.write("#{dir}/c_#{concept_id}.xml", conv.to_xml)
-
arr_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
text_output = String.new if disp == 'text'
json_output = Hash.new if disp == 'json'
@@ -45,8 +45,8 @@ def loss_report_output(concept_id, hide_items=true, disp='text')
# json_output['conv'] = conv_h if disp == 'json'
# text_output += orig_xml if disp == 'text'
- json_output['format'] = content_type if disp == 'json'
- text_output += (content_type + "\n\n") if disp == 'text'
+ json_output['format'] = @content_type if disp == 'json'
+ text_output += (@content_type + "\n\n") if disp == 'text'
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
element = node.to_xml
@@ -150,23 +150,6 @@ def hash_map(hash)
buckets
end
- def prepare_collections(concept_id, umm_c_version)
- original_collection_native_xml = cmr_client.get_concept(concept_id,token, {})
- return false if !original_collection_native_xml.success?
-
- content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
- original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
- translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, content_type, "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", skip_validation=true)
- return false if !translated_collection_umm_json.success?
-
- translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(translated_collection_umm_json.body), "application/vnd.nasa.cmr.umm+json;version=#{umm_c_version}", content_type, skip_validation=true)
- return false if !translated_collection_native_xml.success?
-
- translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
- return original_collection_native_xml.body, translated_collection_native_xml.body, original_collection_native_hash, translated_collection_native_hash, content_type
- end
-
-
def hash_navigation(path, hash)
# Passed a path string and the hash being navigated. This method parses the path string and
# returns the array/value at the end of the path
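Per that comment, hash_navigation's contract is plain path-walking; its body is not shown in this hunk, so the following is only a sketch of what such a walk could look like, assuming '/'-separated paths and string keys (walk_path is a hypothetical stand-in, not the real method):

# Illustrative only; the real hash_navigation implementation is not in this hunk.
def walk_path(path, hash)
  path.split('/').reject(&:empty?).reduce(hash) do |node, key|
    node.is_a?(Hash) ? node[key] : node
  end
end

walk_path('/Collection/Spatial/SpatialCoverageType',
          'Collection' => { 'Spatial' => { 'SpatialCoverageType' => 'Horizontal' } })
# => "Horizontal"
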
diff --git a/app/views/collections/loss_report.html.erb b/app/views/collections/loss_report.html.erb
deleted file mode 100644
index f27bbf9f0..000000000
--- a/app/views/collections/loss_report.html.erb
+++ /dev/null
@@ -1,51 +0,0 @@
-
-
-
- | <%= 'Alteration' %> |
- <%= 'Node' %> |
- <%= 'Path' %> |
-
-
-
-
- <% orig,conv,orig_h,conv_h = prepare_collections('C1200000063-LARC', 'echo10', '1.15.3') %>
- <% orig = Nokogiri::XML(orig) { |config| config.strict.noblanks } %>
- <% conv = Nokogiri::XML(conv) { |config| config.strict.noblanks } %>
- <% ignored_paths = Array.new %>
- <% counter = 0 %>
- <% orig.diff(conv, {:added => true, :removed => true}) do |change,node| %>
- <% if node.parent.path.include?('[') && !ignored_paths.include?(node.parent.path.split('[')[0]) %>
- <% ignored_paths << node.parent.path.split('[')[0] %>
- <% array_comparison(node.parent.path.split('[')[0], orig_h, conv_h).each do |item| %>
-
- |
- <%= item[0] %>
- |
-
- <%= counter %>
- <%= item[1] %>
- |
-
- <%= item[2] %>
- |
-
- <% counter += 1%>
- <% end %>
- <% elsif !ignored_paths.include?(node.parent.path.split('[')[0]) && !path_leads_to_list?(node.parent.path, orig_h, conv_h) %>
-
- |
- <%= change %>
- |
-
- <%= counter %>
- <%= node.to_html %>
- |
-
- <%= node.parent.path %>
- |
-
- <% counter += 1%>
- <% end %>
- <% end %>
-
-
diff --git a/spec/features/collections/loss_report_spec.rb b/spec/features/collections/loss_report_spec.rb
index 14e503ff3..6c84110af 100644
--- a/spec/features/collections/loss_report_spec.rb
+++ b/spec/features/collections/loss_report_spec.rb
@@ -1,6 +1,5 @@
-require 'rails_helper'
-describe 'Displaying the comparison report in browser', js: true do
+describe 'Displaying the comparison report in browser' do
context 'when accessing the comparison report' do
diff --git a/spec/helpers/loss_report_helper_spec.rb b/spec/helpers/loss_report_helper_spec.rb
index a48655938..79b1a0e9c 100644
--- a/spec/helpers/loss_report_helper_spec.rb
+++ b/spec/helpers/loss_report_helper_spec.rb
@@ -1,6 +1,5 @@
-require 'rails_helper'
-describe 'Loss Report Helper', js: true do
+describe 'Loss Report Helper' do
let(:umm_c_version) { '1.15.3' }
context '#prepare_collections' do
@@ -38,12 +37,18 @@
expect(helper.loss_report_output(iso_id).gsub(/\s+/, "")).to eql(iso_text_report.gsub(/\s+/, ""))
end
it 'successfully produces a json loss report' do
- expect(helper.loss_report_output(iso_id, hide_items=false, disp='json')).to have_key("31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date")
+ report = helper.loss_report_output(iso_id, hide_items=false, disp='json')
+ expect(report.keys.length).to be(32)
+ expect(report).to have_key('8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString')
+ expect(report).to have_key('21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]')
+ expect(report).to have_key('24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]')
+ expect(report).to have_key('31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date')
end
end
# the reason the iso example only checks the last key (instead of verifying the full report) is that cmr adds/updates an 'id' attribute
# in the actual collection (every time it is translated) and therefore the comparison report will always include this change
# except with a different value for the 'id' attribute. This would cause the equality between the hashes to evaluate false and fail the
- # test every time. Checking the last change is a comparible solution because it
+ # test every time. Spot checking the output is a comparable solution because any small addition/removal should throw off the numbering system,
+ # +/- symbol, or path, and this test will fail.
end
end
diff --git a/spec/support/loss_report_samples_helper.rb b/spec/support/loss_report_samples_helper.rb
index 46865f046..de5a38c4e 100644
--- a/spec/support/loss_report_samples_helper.rb
+++ b/spec/support/loss_report_samples_helper.rb
@@ -629,7 +629,13 @@ def iso_json_report
}
}
},
- "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC", "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org", "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/", "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact", "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact", "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29", "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
+ "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC",
+ "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org",
+ "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/",
+ "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact",
+ "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact",
+ "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29",
+ "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
}
end
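The namespace handling this patch comments on is easy to see in isolation: with namespaces intact, every node's #path carries prefixes, so the node-by-node comparison needs the namespace bindings in scope for every lookup; stripping them first makes the paths plain. A minimal sketch, assuming nothing beyond Nokogiri itself (the gmd prefix and URI are invented for illustration):

require 'nokogiri'

# A namespaced fragment shaped loosely like an ISO record.
xml = <<~XML
  <gmd:MD_Metadata xmlns:gmd="http://example.com/gmd">
    <gmd:fileIdentifier>L4_SM_aup</gmd:fileIdentifier>
  </gmd:MD_Metadata>
XML

doc = Nokogiri::XML(xml) { |config| config.strict.noblanks }
doc.at('//gmd:fileIdentifier', 'gmd' => 'http://example.com/gmd').path
# => "/gmd:MD_Metadata/gmd:fileIdentifier" (prefixed paths; lookups need
#    the namespace bindings in scope)

doc.remove_namespaces!
doc.at('/MD_Metadata/fileIdentifier').text
# => "L4_SM_aup" (plain paths, as loss_report_output uses for ISO/DIF)
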
From 758e8fa24c0eddc6713bd7ad395d2967cfeecbd6 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 31 Jul 2020 14:19:22 -0400
Subject: [PATCH 38/49] MMT-2313: removed unneeded comment
---
app/helpers/loss_report_helper.rb | 5 -----
1 file changed, 5 deletions(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 39b3acfdd..3dba501ac 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -40,11 +40,6 @@ def loss_report_output(hide_items=true, disp='text')
text_output = String.new if disp == 'text'
json_output = Hash.new if disp == 'json'
- # json_output['orig'] = hash_map(orig_h) if disp == 'json'
- # json_output['orig'] = orig_h if disp == 'json'
- # json_output['conv'] = conv_h if disp == 'json'
- # text_output += orig_xml if disp == 'text'
-
json_output['format'] = @content_type if disp == 'json'
text_output += (@content_type + "\n\n") if disp == 'text'
From aa3e768a4b2dca2f7ee0b973b2e966d50160e88e Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Fri, 31 Jul 2020 16:47:33 -0400
Subject: [PATCH 39/49] MMT-2313: removed an unnecessary cmr call
---
app/controllers/collections_controller.rb | 7 +------
app/helpers/loss_report_helper.rb | 4 ++--
2 files changed, 3 insertions(+), 8 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index f0c884d5a..adc03faf1 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -145,15 +145,10 @@ def prepare_translated_collections
@content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
@collection_error = true if @content_type.include?('application/vnd.nasa.cmr.umm+json;version=')
-
@original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
- translated_collection_umm_json = cmr_client.translate_collection(original_collection_native_xml.body, @content_type, "application/#{Rails.configuration.umm_c_version}; charset=utf-8", skip_validation=true)
- @collection_error = true if !translated_collection_umm_json.success?
-
- translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(translated_collection_umm_json.body), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", @content_type, skip_validation=true)
+ translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", @content_type, skip_validation=true)
@collection_error = true if !translated_collection_native_xml.success?
-
@translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
@original_collection_native_xml = original_collection_native_xml.body
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 3dba501ac..be531dc22 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -51,12 +51,12 @@ def loss_report_output(hide_items=true, disp='text')
if arr_path && path_not_checked?(arr_path, arr_paths)
arr_paths << arr_path
array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
- elsif path_not_checked?(path, arr_paths) # nokogiri
+ elsif path_not_checked?(path, arr_paths)
if is_xml?(node)
element = Hash.from_xml(element)
hash_map(element).each do |item|
arr_path = top_level_arr_path("#{path}/#{item['path']}", orig_h, conv_h)
- if arr_path && path_not_checked?("#{path}/#{item['path']}", arr_paths) # all list
+ if arr_path && path_not_checked?("#{path}/#{item['path']}", arr_paths)
if path_not_checked?(arr_path, arr_paths)
arr_paths << arr_path
array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
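With the redundant cmr call gone, the pipeline is: fetch the native record, reuse the UMM JSON that set_collection already loaded (@collection), translate it back to the native format, and diff the two native documents. A condensed sketch of that flow; cmr_client, token, params, and @collection are assumed to behave as in the diffs above, and the diff call assumes the nokogiri-diff gem, which supplies Nokogiri::XML::Document#diff:

require 'nokogiri'
require 'nokogiri/diff' # assumed source of Document#diff used throughout

def sketch_loss_diff
  native = cmr_client.get_concept(params[:id], token, {})
  content_type = native.headers.fetch('content-type').split(';')[0]

  # Round trip: the already-fetched UMM JSON back to the native format.
  # Anything the UMM model cannot represent is lost here, which is what
  # the report surfaces.
  translated = cmr_client.translate_collection(
    JSON.pretty_generate(@collection),
    "application/#{Rails.configuration.umm_c_version}; charset=utf-8",
    content_type,
    skip_validation = true
  )

  orig = Nokogiri::XML(native.body) { |c| c.strict.noblanks }
  conv = Nokogiri::XML(translated.body) { |c| c.strict.noblanks }

  # Each yield is a '+'/'-' marker plus the changed node, which is exactly
  # what loss_report_output iterates over.
  orig.diff(conv, added: true, removed: true) do |change, node|
    puts "#{change}: #{node.parent.path}"
  end
end
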
From 24093ec75882746baec5bd86186c7693871b476e Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Thu, 6 Aug 2020 12:40:22 -0400
Subject: [PATCH 40/49] MMT-2313: made changes per PR change requests. Also
removed loss_report_helper_spec.rb and loss_report_samples_helper.rb
---
app/controllers/collections_controller.rb | 30 +-
app/helpers/loss_report_helper.rb | 98 +-
config/routes.rb | 2 +-
spec/features/collections/loss_report_spec.rb | 48 +-
.../loss_report_dif_sample.json | 18 +
.../loss_report_dif_sample.text | 5 +
.../loss_report_echo_sample.json | 121 +++
.../loss_report_echo_sample.text | 43 +
.../loss_report_iso_sample.json | 793 +++++++++++++++++
.../loss_report_iso_sample.text | 33 +
spec/helpers/loss_report_helper_spec.rb | 54 --
spec/rails_helper.rb | 1 -
spec/support/loss_report_samples_helper.rb | 840 ------------------
13 files changed, 1108 insertions(+), 978 deletions(-)
create mode 100644 spec/fixtures/loss_report_samples/loss_report_dif_sample.json
create mode 100644 spec/fixtures/loss_report_samples/loss_report_dif_sample.text
create mode 100644 spec/fixtures/loss_report_samples/loss_report_echo_sample.json
create mode 100644 spec/fixtures/loss_report_samples/loss_report_echo_sample.text
create mode 100644 spec/fixtures/loss_report_samples/loss_report_iso_sample.json
create mode 100644 spec/fixtures/loss_report_samples/loss_report_iso_sample.text
delete mode 100644 spec/helpers/loss_report_helper_spec.rb
delete mode 100644 spec/support/loss_report_samples_helper.rb
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index adc03faf1..948abded7 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -120,10 +120,13 @@ def create_update_proposal
def loss_report
# When a user wants to use MMT to edit metadata that currently exists in a non-UMM form,
# it's important that they're able to see if any data loss occurs in the translation to umm.
- # This method is needed to reference the appropriate helper and view for the lossiness report
+ # This method is needed to reference the appropriate helper and view for the lossiness report.
+ # If compared_collections is false, an error message is rendered instead of the report; compared_collections
+ # is false when the cmr calls aren't successful.
+ compared_collections = prepare_translated_collections
respond_to do |format|
- format.text { render plain: loss_report_output(hide_items=true, disp='text') }
- format.json { render json: JSON.pretty_generate(loss_report_output(hide_items=false, disp='json')) }
+ format.text { render plain: JSON.pretty_generate(@collection) + (compared_collections ? loss_report_output(compared_collections, hide_items: true, display: 'text') : 'Failure to get_concept or translate_collection' )}
+ format.json { render json: JSON.pretty_generate(compared_collections ? loss_report_output(compared_collections, hide_items: false, display: 'json') : {"error"=>"Failure to get_concept or translate_collection"}) }
end
end
@@ -141,18 +144,21 @@ def ensure_correct_collection_provider
def prepare_translated_collections
original_collection_native_xml = cmr_client.get_concept(params[:id],token, {})
- original_collection_native_xml.success? ? @collection_error = false : @collection_error = true
+ return false if !original_collection_native_xml.success?
- @content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
- @collection_error = true if @content_type.include?('application/vnd.nasa.cmr.umm+json;version=')
- @original_collection_native_hash = Hash.from_xml(original_collection_native_xml.body)
+ content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
+ return false if content_type.include?('application/vnd.nasa.cmr.umm+json')
- translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", @content_type, skip_validation=true)
- @collection_error = true if !translated_collection_native_xml.success?
- @translated_collection_native_hash = Hash.from_xml(translated_collection_native_xml.body)
+ translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", content_type, skip_validation=true)
+ return false if !translated_collection_native_xml.success?
- @original_collection_native_xml = original_collection_native_xml.body
- @translated_collection_native_xml = translated_collection_native_xml.body
+ return {
+ original_collection_native_xml: original_collection_native_xml.body,
+ translated_collection_native_xml: translated_collection_native_xml.body,
+ original_collection_native_hash: Hash.from_xml(original_collection_native_xml.body),
+ translated_collection_native_hash: Hash.from_xml(translated_collection_native_xml.body),
+ native_format: content_type
+ }
end
def set_collection
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index be531dc22..af09337dc 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,93 +1,89 @@
module LossReportHelper
- def cmr_client
- Cmr::Client.client_for_environment(Rails.configuration.cmr_env, Rails.configuration.services)
- end
-
- def token
- if session[:login_method] == 'launchpad'
- session[:launchpad_cookie]
- elsif session[:login_method] == 'urs'
- session[:access_token]
- end
- end
-
- def loss_report_output(hide_items=true, disp='text')
+ def loss_report_output(compared_collections, hide_items: true, display: 'text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
+ # this display feature could be a good candidate for dependency injection
- # @collection_error is true if there is an error in the translation that is performed by prepare_collections in the collections_controller
- if !@collection_error
- orig_xml,conv_xml = @original_collection_native_xml, @translated_collection_native_xml
- orig_h,conv_h = @original_collection_native_hash, @translated_collection_native_hash
- else
- return 'Failure to get_concept or translate_collection' if disp == 'text'
- return {"error"=>"Failure to get_concept or translate_collection"} if disp == 'json'
- end
+ orig_h = compared_collections[:original_collection_native_hash]
+ conv_h = compared_collections[:translated_collection_native_hash]
# ISO and DIF collections (in XML form) contain namespaces that cause errors in the below comparison.
# Specifically, when nodes are evaluated individually (their namespace definitions remain at the top of the xml),
# their prefixes are undefined in the scope of the evaluation and therefore raise errors. Removing the namespaces
# eliminates this issue.
- if @content_type.include?('iso') || @content_type.include?('dif')
- orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks } .remove_namespaces!
- conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks } .remove_namespaces!
+ if compared_collections[:native_format].include?('iso') || compared_collections[:native_format].include?('dif')
+ orig = Nokogiri::XML(compared_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
+ conv = Nokogiri::XML(compared_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
else
- orig = Nokogiri::XML(orig_xml) { |config| config.strict.noblanks }
- conv = Nokogiri::XML(conv_xml) { |config| config.strict.noblanks }
+ orig = Nokogiri::XML(compared_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }
+ conv = Nokogiri::XML(compared_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }
end
arr_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
- text_output = String.new if disp == 'text'
- json_output = Hash.new if disp == 'json'
- json_output['format'] = @content_type if disp == 'json'
- text_output += (@content_type + "\n\n") if disp == 'text'
+ if display == 'text'
+ text_output = String.new
+ text_output += (compared_collections[:native_format] + "\n\n")
+ elsif display == 'json'
+ json_output = Hash.new
+ json_output['format'] = compared_collections[:native_format]
+ end
+ # Below is the Nokogiri#diff method that is used to compare Nokogiri::XML objects.
+ # The 'change' item is either '+' or '-'; the 'node' item is the Nokogiri::XML::Node object
orig.diff(conv, {:added => true, :removed => true}) do |change,node|
+
element = node.to_xml
path = node.parent.path.split('[')[0]
arr_path = top_level_arr_path(path, orig_h, conv_h)
+ # the first layer of the following if/else structure is used to separately evaluate explicit array changes.
+ # This is why arr_path will evaluate true if the element in question is an array
if arr_path && path_not_checked?(arr_path, arr_paths)
arr_paths << arr_path
- array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
- elsif path_not_checked?(path, arr_paths)
- if is_xml?(node)
+ array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, display, json_output, text_output) }
+ elsif path_not_checked?(path, arr_paths)
+ # this layer of if/else separates items that still contain xml (a Nokogiri oddity: when Nokogiri cannot
+ # map directly to the changed item, the yielded node still contains xml, which is what hash_map
+ # unpacks), items that represent xml attribute changes, and normal changes.
+ if is_xml?(element)
element = Hash.from_xml(element)
hash_map(element).each do |item|
arr_path = top_level_arr_path("#{path}/#{item['path']}", orig_h, conv_h)
+ # this layer of if/else structure is used to separately evaluate implicit array changes in the xml.
+ # This is why arr_path will evaluate true if the element in question is an array
if arr_path && path_not_checked?("#{path}/#{item['path']}", arr_paths)
if path_not_checked?(arr_path, arr_paths)
arr_paths << arr_path
- array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, disp, json_output, text_output) }
+ array_comparison(arr_path, orig_h, conv_h).each { |item| add_to_report(item[0], item[1], item[2], hide_items, display, json_output, text_output) }
end
elsif path_not_checked?("#{path}/#{item['path']}", arr_paths)
- add_to_report(change, item['value'], "#{path}/#{item['path']}", hide_items, disp, json_output, text_output)
+ add_to_report(change, item['value'], "#{path}/#{item['path']}", hide_items, display, json_output, text_output)
end
end
- elsif (attr,val = is_attribute?(node))
- add_to_report(change, val, "#{path}/#{attr}" , hide_items, disp, json_output, text_output)
+ elsif (attr,val = is_attribute?(element))
+ add_to_report(change, val, "#{path}/#{attr}" , hide_items, display, json_output, text_output)
else
- add_to_report(change, element, path, hide_items, disp, json_output, text_output)
+ add_to_report(change, element, path, hide_items, display, json_output, text_output)
end
end
end
- return text_output if disp == 'text'
- return json_output if disp == 'json'
+ return text_output if display == 'text'
+ return json_output if display == 'json'
end
- def is_xml?(node)
- # checks if the node being passed is xml
+ def is_xml?(element)
+ # checks if the element being passed is xml
# may be beneficial to add more checks
- node.to_xml.include?('<' && '' && '>') ? true : false
+ element.include?('<') && element.include?('>')
end
- def is_attribute?(node)
- # this method checks if the node being passed is an attribute change;
+ def is_attribute?(element)
+ # this method checks if the element being passed is an attribute change;
# TODO: it may be beneficial to add more conditions to improve accuracy
- if node.to_xml.include?('=') && !node.to_xml.include?(' = ')
+ if element.include?('=') && !element.include?(' = ')
attr_val = Array.new
- node.to_xml.split('=').each {|item| attr_val << item.strip.delete('\\"')}
+ element.split('=').each {|item| attr_val << item.strip.delete('\\"')}
attr_val
else
false
@@ -123,13 +119,13 @@ def top_level_arr_path(path, orig_h, conv_h)
path_contains_array = false
end
- def add_to_report(change, element, path, hide_items, disp, json_output, text_output)
+ def add_to_report(change, element, path, hide_items, display, json_output, text_output)
@counter ||= 0 and @counter += 1
# this function serves to preclude complex nests from forming in loss_report_output; the
# following 'if' structure is intended to increase readability by eliminating nests
- return text_output.concat("#{@counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && disp == 'text'
- return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && disp == 'text'
- return json_output["#{@counter}. #{change}: #{path}"] = element if disp == 'json'
+ return text_output.concat("#{@counter}.".ljust(4)+"#{change}: #{element}".ljust(60) + path + "\n") if hide_items == false && display == 'text'
+ return text_output.concat("#{@counter}.".ljust(4)+"#{change}: ".ljust(3) + path + "\n") if hide_items == true && display == 'text'
+ return json_output["#{@counter}. #{change}: #{path}"] = element if display == 'json'
end
def hash_map(hash)
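For reference, the numbering in the text fixtures falls straight out of add_to_report's ljust calls above; a standalone sketch of the same formatting, with a local counter standing in for @counter:

# Reproduces add_to_report's text-mode formatting outside the helper.
counter = 0
text_output = String.new
[['-', '/Collection/Orderable'],
 ['+', '/Collection/Temporal/EndsAtPresentFlag']].each do |change, path|
  counter += 1
  # "#{counter}." padded to four columns, then the change marker and path,
  # yielding the numbered lines seen in loss_report_echo_sample.text.
  text_output.concat("#{counter}.".ljust(4) + "#{change}: ".ljust(3) + path + "\n")
end
puts text_output
# 1.  -: /Collection/Orderable
# 2.  +: /Collection/Temporal/EndsAtPresentFlag
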
diff --git a/config/routes.rb b/config/routes.rb
index b5e17c668..5c72eb988 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -69,7 +69,7 @@
get '/collections/:id/download_xml/:format(/:revision_id)' => 'collections#download_xml', as: 'download_collection_xml'
get '/collections/:id/create_delete_proposal' => 'collections#create_delete_proposal', as: 'create_delete_proposal_collection'
get '/collections/:id/create_update_proposal' => 'collections#create_update_proposal', as: 'create_update_proposal_collection'
- get '/collections/:id/loss' => 'collections#loss_report', as: 'loss_report_collections'
+ get '/collections/:id/loss_report' => 'collections#loss_report', as: 'loss_report_collections'
resource :variable_generation_processes_search, only: [:new]
diff --git a/spec/features/collections/loss_report_spec.rb b/spec/features/collections/loss_report_spec.rb
index 6c84110af..e240add59 100644
--- a/spec/features/collections/loss_report_spec.rb
+++ b/spec/features/collections/loss_report_spec.rb
@@ -1,7 +1,10 @@
-describe 'Displaying the comparison report in browser' do
-
- context 'when accessing the comparison report' do
+describe 'Displaying the loss report in browser' do
+ context 'when accessing the loss report' do
+
+ let(:echo_concept_id) { cmr_client.get_collections({'EntryTitle': 'Anthropogenic Biomes of the World, Version 2: 1700'}).body.dig('items',0,'meta','concept-id') }
+ let(:dif_concept_id) { cmr_client.get_collections({'EntryTitle': '2000 Pilot Environmental Sustainability Index (ESI)'}).body.dig('items',0,'meta','concept-id') }
+ let(:iso_concept_id) { cmr_client.get_collections({'EntryTitle': 'SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002'}).body.dig('items',0,'meta','concept-id') }
before do
login
@@ -9,31 +12,38 @@
context 'when displaying json' do
it 'properly displays the echo json report' do
- visit loss_report_collections_path(echo_id, format:'json')
- expect(page).to have_content('application/echo')
- end
- it 'properly displays the iso json report' do
- visit loss_report_collections_path(iso_id, format:'json')
- expect(page).to have_content('application/iso')
+ visit loss_report_collections_path(echo_concept_id, format:'json')
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_echo_sample.json').gsub(/\s+/, ""))
end
it 'properly displays the dif json report' do
- visit loss_report_collections_path(dif_id, format:'json')
- expect(page).to have_content('application/dif')
+ visit loss_report_collections_path(dif_concept_id, format:'json')
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_dif_sample.json').gsub(/\s+/, ""))
+ end
+ it 'properly displays the iso json report' do
+ visit loss_report_collections_path(iso_concept_id, format:'json')
+ # the reason this iso example has to be split is that cmr adds/updates a couple 'id' attributes
+ # in the actual collection (every time it is translated) and therefore the comparison report will always include these changes
+ # except with a different value for the 'id' attribute. In order to bypass this issue we ignore the 'id' changes by using them as #split delimiters
+ string_part = File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.json').gsub(/\s+/, "").split(/dc714eaf5-01b3-4705-9031-f35c87e98529|dd8cd38ba-0984-4af1-9a10-b6e303388cc4/)
+ expect(page.text.gsub(/\s+/, "")).to have_text(string_part[0])
+ expect(page.text.gsub(/\s+/, "")).to have_text(string_part[1])
+ expect(page.text.gsub(/\s+/, "")).to have_text(string_part[2])
end
end
context 'when displaying text' do
+
it 'properly displays the echo text report' do
- visit loss_report_collections_path(echo_id, format:'text')
- expect(page).to have_content('application/echo')
- end
- it 'properly displays the iso text report' do
- visit loss_report_collections_path(iso_id, format:'text')
- expect(page).to have_content('application/iso')
+ visit loss_report_collections_path(echo_concept_id, format:'text')
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_echo_sample.text').gsub(/\s+/, ""))
end
it 'properly displays the dif text report' do
- visit loss_report_collections_path(dif_id, format:'text')
- expect(page).to have_content('application/dif')
+ visit loss_report_collections_path(dif_concept_id, format:'text')
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_dif_sample.text').gsub(/\s+/, ""))
+ end
+ it 'properly displays the iso text report' do
+ visit loss_report_collections_path(iso_concept_id, format:'text')
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_iso_sample.text').gsub(/\s+/, ""))
end
end
end
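The delimiter trick in the iso json spec above generalizes: any substring that cmr regenerates on every translation can serve as a split point, and each stable fragment between the volatile values is asserted on its own. A minimal sketch with invented ids and strings:

# Sketch of the volatile-id delimiter technique; the ids and report strings
# here are invented for illustration.
expected = 'format...id:"OLD-ID-1"...paths...id:"OLD-ID-2"...dateStamp'
actual   = 'format...id:"NEW-ID-1"...paths...id:"NEW-ID-2"...dateStamp'

# Split the expectation on the values that change every run; only the
# stable fragments between them are compared.
stable_parts = expected.split(/OLD-ID-1|OLD-ID-2/)
# => ["format...id:\"", "\"...paths...id:\"", "\"...dateStamp"]

stable_parts.each do |part|
  raise 'stable fragment missing from report' unless actual.include?(part)
end
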
diff --git a/spec/fixtures/loss_report_samples/loss_report_dif_sample.json b/spec/fixtures/loss_report_samples/loss_report_dif_sample.json
new file mode 100644
index 000000000..9c84a71c2
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_dif_sample.json
@@ -0,0 +1,18 @@
+{
+ "format": "application/dif10+xml",
+ "1. -: /DIF/Temporal_Coverage/Temporal_Range_Type": "Long Range",
+ "2. -: /DIF/Related_URL[1]": {
+ "URL_Content_Type": {
+ "Type": "VIEW DATA SET LANDING PAGE"
+ },
+ "URL": "http://dx.doi.org/10.7927/H4NK3BZJ",
+ "Description": "data set DOI and homepage"
+ },
+ "3. +: /DIF/Related_URL[1]": {
+ "URL_Content_Type": {
+ "Type": "DATA SET LANDING PAGE"
+ },
+ "URL": "http://dx.doi.org/10.7927/H4NK3BZJ",
+ "Description": "data set DOI and homepage"
+ }
+}
\ No newline at end of file
diff --git a/spec/fixtures/loss_report_samples/loss_report_dif_sample.text b/spec/fixtures/loss_report_samples/loss_report_dif_sample.text
new file mode 100644
index 000000000..449df6003
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_dif_sample.text
@@ -0,0 +1,5 @@
+application/dif10+xml
+
+1. -: /DIF/Temporal_Coverage/Temporal_Range_Type
+2. -: /DIF/Related_URL[1]
+3. +: /DIF/Related_URL[1]
diff --git a/spec/fixtures/loss_report_samples/loss_report_echo_sample.json b/spec/fixtures/loss_report_samples/loss_report_echo_sample.json
new file mode 100644
index 000000000..42c2c5f88
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_echo_sample.json
@@ -0,0 +1,121 @@
+{
+ "format": "application/echo10+xml",
+ "1. -: /Collection/Orderable": "true",
+ "2. -: /Collection/Visible": "true",
+ "3. -: /Collection/MaintenanceAndUpdateFrequency": "As needed",
+ "4. +: /Collection/Temporal/EndsAtPresentFlag": "false",
+ "5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime": "1970-01-01T00:00:00.000Z",
+ "6. +: /Collection/Platforms/Platform/ShortName": "Not provided",
+ "7. +: /Collection/Platforms/Platform/LongName": "Not provided",
+ "8. +: /Collection/Platforms/Platform/Type": "Not provided",
+ "9. -: /Collection/AssociatedDIFs/DIF/EntryId": "CIESIN_SEDAC_ANTHROMES_v2_1700",
+ "10. -: /Collection/InsertTime": "2014-05-13T00:00:00Z",
+ "11. +: /Collection/InsertTime": "2014-05-13T00:00:00.000Z",
+ "12. -: /Collection/LastUpdate": "2015-08-04T00:00:00Z",
+ "13. +: /Collection/LastUpdate": "2015-08-04T00:00:00.000Z",
+ "14. -: /Collection/LongName": "Anthropogenic Biomes of the World, Version 2: 1700",
+ "15. +: /Collection/LongName": "Not provided",
+ "16. -: /Collection/CollectionState": "Final",
+ "17. +: /Collection/CollectionState": "NOT PROVIDED",
+ "18. -: /Collection/Price": "0",
+ "19. +: /Collection/Price": " 0.00",
+ "20. -: /Collection/SpatialKeywords/Keyword[0]": "Africa",
+ "21. -: /Collection/SpatialKeywords/Keyword[1]": "Asia",
+ "22. +: /Collection/SpatialKeywords/Keyword[0]": "AFRICA",
+ "23. +: /Collection/SpatialKeywords/Keyword[1]": "GAZA STRIP",
+ "24. -: /Collection/Contacts/Contact[0]": {
+ "Role": "Archive",
+ "HoursOfService": "9:00 A.M. to 5:00 P.M., Monday to Friday",
+ "OrganizationName": "Socioeconomic Data and Applications Center (SEDAC)",
+ "OrganizationAddresses": {
+ "Address": {
+ "StreetAddress": "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000",
+ "City": "Palisades",
+ "StateProvince": "NY",
+ "PostalCode": "10964",
+ "Country": "USA"
+ }
+ },
+ "OrganizationPhones": {
+ "Phone": [
+ {
+ "Number": "+1 845-365-8920",
+ "Type": "Telephone"
+ },
+ {
+ "Number": "+1 845-365-8922",
+ "Type": "Fax"
+ }
+ ]
+ },
+ "OrganizationEmails": {
+ "Email": "ciesin.info@ciesin.columbia.edu"
+ },
+ "ContactPersons": {
+ "ContactPerson": {
+ "FirstName": "SEDAC",
+ "MiddleName": "User",
+ "LastName": "Services"
+ }
+ }
+ },
+ "25. +: /Collection/Contacts/Contact[0]": {
+ "Role": "PROCESSOR",
+ "OrganizationName": "SEDAC"
+ },
+ "26. +: /Collection/Contacts/Contact[1]": {
+ "Role": "ARCHIVER",
+ "OrganizationName": "SEDAC"
+ },
+ "27. +: /Collection/Contacts/Contact[2]": {
+ "Role": "ARCHIVER",
+ "HoursOfService": "9:00 A.M. to 5:00 P.M., Monday to Friday",
+ "OrganizationName": "Socioeconomic Data and Applications Center (SEDAC)",
+ "OrganizationAddresses": {
+ "Address": {
+ "StreetAddress": "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000",
+ "City": "Palisades",
+ "StateProvince": "NY",
+ "PostalCode": "10964",
+ "Country": "USA"
+ }
+ },
+ "OrganizationPhones": {
+ "Phone": [
+ {
+ "Number": "+1 845-365-8920",
+ "Type": "Telephone"
+ },
+ {
+ "Number": "+1 845-365-8922",
+ "Type": "Fax"
+ }
+ ]
+ },
+ "OrganizationEmails": {
+ "Email": "ciesin.info@ciesin.columbia.edu"
+ },
+ "ContactPersons": {
+ "ContactPerson": {
+ "FirstName": "SEDAC",
+ "MiddleName": "User",
+ "LastName": "Services",
+ "JobPosition": "TECHNICAL CONTACT"
+ }
+ }
+ },
+ "28. -: /Collection/SpatialInfo/SpatialCoverageType": "Horizontal",
+ "29. +: /Collection/SpatialInfo/SpatialCoverageType": "HORIZONTAL",
+ "30. -: /Collection/OnlineResources/OnlineResource/Type": "DOI URL",
+ "31. +: /Collection/OnlineResources/OnlineResource/Type": "CollectionURL : DATA SET LANDING PAGE",
+ "32. -: /Collection/Spatial/SpatialCoverageType": "Horizontal",
+ "33. +: /Collection/Spatial/SpatialCoverageType": "HORIZONTAL",
+ "34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate": "-180.000000",
+ "35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate": "-180.0",
+ "36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate": "90.000000",
+ "37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate": "90.0",
+ "38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate": "180.000000",
+ "39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate": "180.0",
+ "40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate": "-90.000000",
+ "41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate": "-90.0"
+}
\ No newline at end of file
diff --git a/spec/fixtures/loss_report_samples/loss_report_echo_sample.text b/spec/fixtures/loss_report_samples/loss_report_echo_sample.text
new file mode 100644
index 000000000..3a798c2df
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_echo_sample.text
@@ -0,0 +1,43 @@
+application/echo10+xml
+
+1. -: /Collection/Orderable
+2. -: /Collection/Visible
+3. -: /Collection/MaintenanceAndUpdateFrequency
+4. +: /Collection/Temporal/EndsAtPresentFlag
+5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime
+6. +: /Collection/Platforms/Platform/ShortName
+7. +: /Collection/Platforms/Platform/LongName
+8. +: /Collection/Platforms/Platform/Type
+9. -: /Collection/AssociatedDIFs/DIF/EntryId
+10. -: /Collection/InsertTime
+11. +: /Collection/InsertTime
+12. -: /Collection/LastUpdate
+13. +: /Collection/LastUpdate
+14. -: /Collection/LongName
+15. +: /Collection/LongName
+16. -: /Collection/CollectionState
+17. +: /Collection/CollectionState
+18. -: /Collection/Price
+19. +: /Collection/Price
+20. -: /Collection/SpatialKeywords/Keyword[0]
+21. -: /Collection/SpatialKeywords/Keyword[1]
+22. +: /Collection/SpatialKeywords/Keyword[0]
+23. +: /Collection/SpatialKeywords/Keyword[1]
+24. -: /Collection/Contacts/Contact[0]
+25. +: /Collection/Contacts/Contact[0]
+26. +: /Collection/Contacts/Contact[1]
+27. +: /Collection/Contacts/Contact[2]
+28. -: /Collection/SpatialInfo/SpatialCoverageType
+29. +: /Collection/SpatialInfo/SpatialCoverageType
+30. -: /Collection/OnlineResources/OnlineResource/Type
+31. +: /Collection/OnlineResources/OnlineResource/Type
+32. -: /Collection/Spatial/SpatialCoverageType
+33. +: /Collection/Spatial/SpatialCoverageType
+34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
+36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
+38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
+40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
+41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
diff --git a/spec/fixtures/loss_report_samples/loss_report_iso_sample.json b/spec/fixtures/loss_report_samples/loss_report_iso_sample.json
new file mode 100644
index 000000000..02a2a052d
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_iso_sample.json
@@ -0,0 +1,793 @@
+{
+ "format": "application/iso:smap+xml",
+ "1. -: /DS_Series/schemaLocation": "http://www.isotc211.org/2005/gmi http://cdn.earthdata.nasa.gov/iso/schema/1.0/ISO19115-2_EOS.xsd",
+ "2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName": "L4_SM_aup",
+ "3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode": "utf8",
+ "4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode": "series",
+ "5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription": null,
+ "6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType": null,
+ "7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString": "Not provided",
+ "8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString": "gov.nasa.esdis.umm.processinglevelid",
+ "9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString": "ISO 19115-2 Geographic information - Metadata - Part 2: Extensions for imagery and gridded data",
+ "10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode": "series",
+ "11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id": "dc714eaf5-01b3-4705-9031-f35c87e98529",
+ "12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString": "Not provided",
+ "13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString": "gov.nasa.esdis.umm.platformshortname",
+ "14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString": "Not provided",
+ "15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason": "inapplicable",
+ "16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString": "ISO 19115-2:2009-02-15",
+ "17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "Date": "2016-04-29"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ "edition": {
+ "CharacterString": "Vv2010"
+ },
+ "identifier": [
+ {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SPL4SMAU"
+ },
+ "codeSpace": {
+ "CharacterString": "http://gmao.gsfc.nasa.gov"
+ },
+ "description": {
+ "CharacterString": "The ECS Short Name"
+ }
+ }
+ },
+ {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "002"
+ },
+ "codeSpace": {
+ "CharacterString": "gov.nasa.esdis"
+ },
+ "description": {
+ "CharacterString": "The ECS Version ID"
+ }
+ }
+ },
+ {
+ "MD_Identifier": {
+ "code": {
+ "Anchor": "doi:10.5067/JJY2V0GJNFRZ"
+ },
+ "codeSpace": {
+ "CharacterString": "gov.nasa.esdis"
+ },
+ "description": {
+ "CharacterString": "A Digital Object Identifier (DOI) that provides a persistent interoperable means to locate the SMAP Level 4 Radar data product."
+ }
+ }
+ }
+ ],
+ "citedResponsibleParty": [
+ {
+ "CI_ResponsibleParty": {
+ "organisationName": {
+ "CharacterString": "National Aeronautics and Space Administration"
+ },
+ "role": {
+ "CI_RoleCode": "resourceProvider"
+ }
+ }
+ },
+ {
+ "CI_ResponsibleParty": {
+ "organisationName": {
+ "CharacterString": "Global Modeling and Assimilation Office"
+ },
+ "role": {
+ "CI_RoleCode": "originator"
+ }
+ }
+ }
+ ],
+ "presentationForm": {
+ "CI_PresentationFormCode": "documentDigital"
+ },
+ "series": {
+ "CI_Series": {
+ "name": {
+ "CharacterString": "L4_SM"
+ }
+ }
+ },
+ "otherCitationDetails": {
+ "CharacterString": "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ },
+ "purpose": {
+ "CharacterString": "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ },
+ "credit": {
+ "CharacterString": "The software that generates the L4_SM data product and the data system that automates its production were designed and implemented at the NASA Global Modeling and Assimilation Office, Goddard Space Flight Center, Greenbelt, Maryland, USA."
+ },
+ "status": {
+ "MD_ProgressCode": "onGoing"
+ },
+ "pointOfContact": {
+ "CI_ResponsibleParty": {
+ "organisationName": {
+ "CharacterString": "PVC"
+ },
+ "role": {
+ "CI_RoleCode": "distributor"
+ }
+ }
+ },
+ "resourceMaintenance": {
+ "MD_MaintenanceInformation": {
+ "maintenanceAndUpdateFrequency": {
+ "MD_MaintenanceFrequencyCode": "As Needed"
+ },
+ "dateOfNextUpdate": {
+ "Date": "2016-11-01"
+ },
+ "updateScope": {
+ "MD_ScopeCode": "series"
+ }
+ }
+ },
+ "resourceFormat": {
+ "MD_Format": {
+ "name": {
+ "CharacterString": "HDF5"
+ },
+ "version": {
+ "CharacterString": "Version 1.8.9"
+ }
+ }
+ },
+ "descriptiveKeywords": [
+ {
+ "MD_Keywords": {
+ "keyword": [
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT"
+ },
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ },
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }
+ ],
+ "type": {
+ "MD_KeywordTypeCode": "theme"
+ },
+ "thesaurusName": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "NASA/GCMD Earth Science Keywords"
+ },
+ "date": {
+ "gco:nilReason": "missing"
+ }
+ }
+ }
+ }
+ },
+ {
+ "MD_Keywords": {
+ "keyword": {
+ "CharacterString": "Earth Remote Sensing Instruments > Active Remote Sensing > NONE > SMAP L-BAND RADAR > SMAP L-Band Radar"
+ },
+ "type": {
+ "MD_KeywordTypeCode": "theme"
+ },
+ "thesaurusName": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "NASA/GCMD Earth Science Keywords"
+ },
+ "date": {
+ "gco:nilReason": "missing"
+ }
+ }
+ }
+ }
+ },
+ {
+ "MD_Keywords": {
+ "keyword": {
+ "CharacterString": "Earth Observation Satellites > NASA Decadal Survey > SMAP > Soil Moisture Active and Passive Observatory"
+ },
+ "type": {
+ "MD_KeywordTypeCode": "theme"
+ },
+ "thesaurusName": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "NASA/GCMD Earth Science Keywords"
+ },
+ "date": {
+ "gco:nilReason": "missing"
+ }
+ }
+ }
+ }
+ },
+ {
+ "MD_Keywords": {
+ "keyword": {
+ "CharacterString": "GEOGRAPHIC REGION > GLOBAL"
+ },
+ "type": {
+ "MD_KeywordTypeCode": "theme"
+ },
+ "thesaurusName": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "NASA/GCMD Earth Science Keywords"
+ },
+ "date": {
+ "gco:nilReason": "missing"
+ }
+ }
+ }
+ }
+ }
+ ],
+ "aggregationInfo": {
+ "MD_AggregateInformation": {
+ "aggregateDataSetIdentifier": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SMAP"
+ }
+ }
+ },
+ "associationType": {
+ "DS_AssociationTypeCode": "largerWorkCitation"
+ },
+ "initiativeType": {
+ "DS_InitiativeTypeCode": "mission"
+ }
+ }
+ },
+ "language": {
+ "CharacterString": "eng"
+ },
+ "characterSet": {
+ "MD_CharacterSetCode": "utf8"
+ },
+ "topicCategory": {
+ "MD_TopicCategoryCode": "geoscientificInformation"
+ },
+ "environmentDescription": {
+ "CharacterString": "Data product generated by the SMAP mission in HDF5 format with metadata that conforms to the ISO 19115 model."
+ },
+ "extent": {
+ "EX_Extent": {
+ "description": {
+ "CharacterString": "Global land excluding inland water and permanent ice."
+ },
+ "geographicElement": {
+ "EX_GeographicBoundingBox": {
+ "extentTypeCode": {
+ "Boolean": "1"
+ },
+ "westBoundLongitude": {
+ "Decimal": "-180"
+ },
+ "eastBoundLongitude": {
+ "Decimal": "180"
+ },
+ "southBoundLatitude": {
+ "Decimal": "-85.04456"
+ },
+ "northBoundLatitude": {
+ "Decimal": "85.04456"
+ }
+ }
+ },
+ "temporalElement": {
+ "EX_TemporalExtent": {
+ "extent": {
+ "TimePeriod": {
+ "gml:id": "swathTemporalExtent",
+ "beginPosition": "2015-03-31T01:30:00.000Z",
+ "endPosition": "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "Soil Moisture Active Passive Mission Level 4 Surface and Root Zone Soil Moisture (L4_SM) Product Specification Document"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "Date": "2015-10-31"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "publication"
+ }
+ }
+ },
+ "edition": {
+ "CharacterString": "1.4"
+ },
+ "identifier": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "L4_SM"
+ },
+ "codeSpace": {
+ "CharacterString": "http://gmao.gsfc.nasa.gov"
+ },
+ "description": {
+ "CharacterString": "A short name used by the Soil Moisture Active Passive (SMAP) mission to identify the Level 4 Radar product."
+ }
+ }
+ },
+ "presentationForm": {
+ "CI_PresentationFormCode": "documentDigital"
+ },
+ "series": {
+ "CI_Series": {
+ "name": {
+ "CharacterString": "L4_SM"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "DataSetId"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-12T11:50:19.050Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "DataSetId"
+ },
+ "aggregationInfo": {
+ "MD_AggregateInformation": {
+ "aggregateDataSetIdentifier": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ },
+ "associationType": null
+ }
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "InsertTime"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-08T09:16:24.835Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "creation"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "InsertTime"
+ },
+ "purpose": {
+ "CharacterString": "InsertTime"
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "UpdateTime"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-12T11:50:19.050Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "UpdateTime"
+ },
+ "purpose": {
+ "CharacterString": "UpdateTime"
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "DIFID"
+ },
+ "date": {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-12T11:50:19.050Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ "identifier": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SPL4SMAU"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "DIFID"
+ },
+ "purpose": {
+ "CharacterString": "DIFID"
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
+ },
+ "date": [
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-04-29T00:00:00.000Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-12T11:50:19.050Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-08T09:16:24.835Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "creation"
+ }
+ }
+ }
+ ],
+ "edition": {
+ "CharacterString": "Vv2010"
+ },
+ "identifier": [
+ {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SPL4SMAU"
+ },
+ "description": {
+ "CharacterString": "The ECS Short Name"
+ }
+ }
+ },
+ {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "002"
+ },
+ "description": {
+ "CharacterString": "The ECS Version ID"
+ }
+ }
+ },
+ {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "doi:10.5067/JJY2V0GJNFRZ"
+ },
+ "codeSpace": {
+ "CharacterString": "gov.nasa.esdis.umm.doi"
+ },
+ "description": {
+ "CharacterString": "DOI"
+ }
+ }
+ }
+ ],
+ "presentationForm": {
+ "CI_PresentationFormCode": "documentDigital"
+ },
+ "series": {
+ "CI_Series": {
+ "name": {
+ "CharacterString": "L4_SM"
+ }
+ }
+ },
+ "otherCitationDetails": {
+ "CharacterString": "The first Validated Release of the SMAP Level 4 Science Processing Software."
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
+ },
+ "purpose": {
+ "gco:nilReason": "missing",
+ "CharacterString": "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
+ },
+ "status": {
+ "MD_ProgressCode": "onGoing"
+ },
+ "pointOfContact": {
+ "CI_ResponsibleParty": {
+ "organisationName": {
+ "CharacterString": "PVC"
+ },
+ "role": {
+ "CI_RoleCode": "distributor"
+ }
+ }
+ },
+ "descriptiveKeywords": [
+ {
+ "MD_Keywords": {
+ "keyword": [
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > NONE"
+ },
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
+ },
+ {
+ "CharacterString": "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
+ }
+ ],
+ "type": {
+ "MD_KeywordTypeCode": "theme"
+ },
+ "thesaurusName": {
+ "gco:nilReason": "unknown"
+ }
+ }
+ },
+ {
+ "MD_Keywords": {
+ "keyword": {
+ "CharacterString": "Aircraft > Not provided > Not provided > "
+ }
+ }
+ }
+ ],
+ "language": {
+ "CharacterString": "eng"
+ },
+ "topicCategory": {
+ "MD_TopicCategoryCode": "geoscientificInformation"
+ },
+ "extent": {
+ "EX_Extent": {
+ "geographicElement": {
+ "EX_GeographicBoundingBox": {
+ "extentTypeCode": {
+ "Boolean": "1"
+ },
+ "westBoundLongitude": {
+ "Decimal": "-180.0"
+ },
+ "eastBoundLongitude": {
+ "Decimal": "180.0"
+ },
+ "southBoundLatitude": {
+ "Decimal": "-85.04456"
+ },
+ "northBoundLatitude": {
+ "Decimal": "85.04456"
+ }
+ }
+ },
+ "temporalElement": {
+ "EX_TemporalExtent": {
+ "extent": {
+ "TimePeriod": {
+ "gml:id": "dd8cd38ba-0984-4af1-9a10-b6e303388cc4",
+ "beginPosition": "2015-03-31T01:30:00.000Z",
+ "endPosition": "2021-01-01T01:29:59.999Z"
+ }
+ }
+ }
+ }
+ }
+ },
+ "processingLevel": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "Not provided"
+ },
+ "codeSpace": {
+ "CharacterString": "gov.nasa.esdis.umm.processinglevelid"
+ }
+ }
+ }
+ }
+ },
+ "24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]": {
+ "MD_DataIdentification": {
+ "citation": {
+ "CI_Citation": {
+ "title": {
+ "CharacterString": "DataSetId"
+ },
+ "date": [
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-04-29T00:00:00.000Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-12T11:50:19.050Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "revision"
+ }
+ }
+ },
+ {
+ "CI_Date": {
+ "date": {
+ "DateTime": "2016-09-08T09:16:24.835Z"
+ },
+ "dateType": {
+ "CI_DateTypeCode": "creation"
+ }
+ }
+ }
+ ],
+ "citedResponsibleParty": {
+ "CI_ResponsibleParty": {
+ "organisationName": {
+ "CharacterString": "Global Modeling and Assimilation Office"
+ },
+ "role": {
+ "CI_RoleCode": "originator"
+ }
+ }
+ }
+ }
+ },
+ "abstract": {
+ "CharacterString": "DataSetId"
+ },
+ "resourceFormat": {
+ "MD_Format": {
+ "name": {
+ "CharacterString": "HDF5"
+ },
+ "version": {
+ "gco:nilReason": "unknown"
+ }
+ }
+ },
+ "aggregationInfo": {
+ "MD_AggregateInformation": {
+ "aggregateDataSetIdentifier": {
+ "MD_Identifier": {
+ "code": {
+ "CharacterString": "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
+ }
+ }
+ },
+ "associationType": {
+ "DS_AssociationTypeCode": "largerWorkCitation"
+ }
+ }
+ },
+ "language": {
+ "CharacterString": "eng"
+ }
+ }
+ },
+ "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString": "NSIDC DAAC > National Snow and Ice Data Center DAAC",
+ "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString": "nsidc@nsidc.org",
+ "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL": "http://nsidc.org/daac/",
+ "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode": "pointOfContact",
+ "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href": "#alaskaSARContact",
+ "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date": "2016-04-29",
+ "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date": "2013-01-02"
+}
\ No newline at end of file
diff --git a/spec/fixtures/loss_report_samples/loss_report_iso_sample.text b/spec/fixtures/loss_report_samples/loss_report_iso_sample.text
new file mode 100644
index 000000000..4a98bd38d
--- /dev/null
+++ b/spec/fixtures/loss_report_samples/loss_report_iso_sample.text
@@ -0,0 +1,33 @@
+application/iso:smap+xml
+
+1. -: /DS_Series/schemaLocation
+2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName
+3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode
+4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode
+5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription
+6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType
+7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString
+8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString
+9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString
+10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode
+11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id
+12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString
+13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString
+14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString
+15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason
+16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString
+17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]
+20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]
+21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]
+22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]
+23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
+24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
+25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString
+26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString
+27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL
+28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode
+29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href
+30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
+31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
diff --git a/spec/helpers/loss_report_helper_spec.rb b/spec/helpers/loss_report_helper_spec.rb
deleted file mode 100644
index 79b1a0e9c..000000000
--- a/spec/helpers/loss_report_helper_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-
-describe 'Loss Report Helper' do
- let(:umm_c_version) { '1.15.3' }
-
- context '#prepare_collections' do
- context 'when using cmr endpoints' do
- it 'successfully retrieves and translates the dif collection' do
- expect(helper.prepare_collections(dif_id, umm_c_version)).to be_truthy
- end
- it 'successfully retrieves and translates the iso collection' do
- expect(helper.prepare_collections(iso_id, umm_c_version)).to be_truthy
- end
- it 'successfully retrieves and translates the echo collection' do
- expect(helper.prepare_collections(echo_id, umm_c_version)).to be_truthy
- end
- end
-
- context '#loss_report_output'
- context 'when processing a dif collection' do
- it 'successfully produces a text loss report' do
- expect(helper.loss_report_output(dif_id).gsub(/\s+/, "")).to eql(dif_text_report.gsub(/\s+/, ""))
- end
- it 'successfully produces a json loss report' do
- expect(helper.loss_report_output(dif_id, hide_items=false, disp='json')).to eql(dif_json_report)
- end
- end
- context 'when processing an echo collection' do
- it 'successfully produces a text loss report' do
- expect(helper.loss_report_output(echo_id).gsub(/\s+/, "")).to eql(echo_text_report.gsub(/\s+/, ""))
- end
- it 'successfully produces a json loss report' do
- expect(helper.loss_report_output(echo_id, hide_items=false, disp='json')).to eql(echo_json_report)
- end
- end
- context 'when processing an iso collection' do
- it 'successfully produces a text loss report' do
- expect(helper.loss_report_output(iso_id).gsub(/\s+/, "")).to eql(iso_text_report.gsub(/\s+/, ""))
- end
- it 'successfully produces a json loss report' do
- report = helper.loss_report_output(iso_id, hide_items=false, disp='json')
- expect(report.keys.length).to be(32)
- expect(report).to have_key('8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString')
- expect(report).to have_key('21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]')
- expect(report).to have_key('24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]')
- expect(report).to have_key('31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date')
- end
- end
-    # the reason the iso example only spot-checks a few keys (instead of verifying the full report) is that cmr adds/updates an 'id' attribute
-    # in the actual collection (every time it is translated) and therefore the comparison report will always include this change
-    # except with a different value for the 'id' attribute. This would cause the equality between the hashes to evaluate false and fail the
-    # test every time. Spot-checking the output is a comparable solution because any small addition/removal should throw off the numbering system,
-    # +/- symbol, or path, and this test will fail.
- end
-end
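The deleted spec's closing comment describes spot-checking as a workaround for CMR minting a fresh 'id' value on every translation. The same idea can be phrased as a normalization step that redacts the volatile ids before comparing whole reports; a minimal sketch in plain Ruby (the pattern and helper name are illustrative assumptions, not code from this repository):

    # Redact CMR-generated ids so two loss reports built from the same
    # collection compare equal even though each translation mints new ids.
    # The first group allows 8 or 9 hex digits to match ids like
    # 'dc714eaf5-01b3-4705-9031-f35c87e98529' seen in the fixtures.
    ID_PATTERN = /\h{8,9}-\h{4}-\h{4}-\h{4}-\h{12}/.freeze

    def normalize_loss_report(text)
      text.gsub(/\s+/, '').gsub(ID_PATTERN, '<ID>')
    end

    # expect(normalize_loss_report(actual)).to eql(normalize_loss_report(expected))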
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index 8a1e89ccb..9c81587a1 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -189,7 +189,6 @@
config.include Helpers::GroupHelper
config.include Helpers::IngestHelpers
config.include Helpers::Instrumentation
- config.include Helpers::LossReportSamplesHelper
config.include Helpers::ProposalStatusHelper
config.include Helpers::SearchHelpers
config.include Helpers::SubscriptionHelpers
diff --git a/spec/support/loss_report_samples_helper.rb b/spec/support/loss_report_samples_helper.rb
deleted file mode 100644
index de5a38c4e..000000000
--- a/spec/support/loss_report_samples_helper.rb
+++ /dev/null
@@ -1,840 +0,0 @@
-module Helpers
- module LossReportSamplesHelper
- def dif_id
- 'C1200000031-SEDAC'
- end
-
- def iso_id
- 'C1200000089-LARC'
- end
-
- def echo_id
- 'C1200000040-SEDAC'
- end
-
- def iso_json_report
- {
- "format" => "application/iso:smap+xml",
- "1. -: /DS_Series/schemaLocation" => "http://www.isotc211.org/2005/gmi http://cdn.earthdata.nasa.gov/iso/schema/1.0/ISO19115-2_EOS.xsd",
- "2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName" => "L4_SM_aup",
- "3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode" => "utf8",
- "4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode" => "series",
- "5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription" => nil,
- "6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType" => nil,
- "7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString" => "Not provided",
- "8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.processinglevelid",
- "9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString" => "ISO 19115-2 Geographic information - Metadata - Part 2: Extensions for imagery and gridded data",
- "10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode" => "series",
- "11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id" => "dba588298-ef6b-4e0f-9092-d1bfe87001ea",
- "12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString" => "Not provided",
- "13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString" => "gov.nasa.esdis.umm.platformshortname",
- "14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString" => "Not provided",
- "15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason" => "inapplicable",
- "16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString" => "ISO 19115-2:2009-02-15",
- "17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "Date" => "2016-04-29"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, "edition" => {
- "CharacterString" => "Vv2010"
- }, "identifier" => [{
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }, "codeSpace" => {
- "CharacterString" => "http://gmao.gsfc.nasa.gov"
- }, "description" => {
- "CharacterString" => "The ECS Short Name"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "002"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis"
- }, "description" => {
- "CharacterString" => "The ECS Version ID"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "Anchor" => "doi:10.5067/JJY2V0GJNFRZ"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis"
- }, "description" => {
- "CharacterString" => "A Digital Object Identifier (DOI) that provides a persistent interoperable means to locate the SMAP Level 4 Radar data product."
- }
- }
- }], "citedResponsibleParty" => [{
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "National Aeronautics and Space Administration"
- }, "role" => {
- "CI_RoleCode" => "resourceProvider"
- }
- }
- }, {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "Global Modeling and Assimilation Office"
- }, "role" => {
- "CI_RoleCode" => "originator"
- }
- }
- }], "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }, "otherCitationDetails" => {
- "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "purpose" => {
- "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
- }, "credit" => {
- "CharacterString" => "The software that generates the L4_SM data product and the data system that automates its production were designed and implemented at the NASA Global Modeling and Assimilation Office, Goddard Space Flight Center, Greenbelt, Maryland, USA."
- }, "status" => {
- "MD_ProgressCode" => "onGoing"
- }, "pointOfContact" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "PVC"
- }, "role" => {
- "CI_RoleCode" => "distributor"
- }
- }
- }, "resourceMaintenance" => {
- "MD_MaintenanceInformation" => {
- "maintenanceAndUpdateFrequency" => {
- "MD_MaintenanceFrequencyCode" => "As Needed"
- }, "dateOfNextUpdate" => {
- "Date" => "2016-11-01"
- }, "updateScope" => {
- "MD_ScopeCode" => "series"
- }
- }
- }, "resourceFormat" => {
- "MD_Format" => {
- "name" => {
- "CharacterString" => "HDF5"
- }, "version" => {
- "CharacterString" => "Version 1.8.9"
- }
- }
- }, "descriptiveKeywords" => [{
- "MD_Keywords" => {
- "keyword" => [{
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
- }], "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Earth Remote Sensing Instruments > Active Remote Sensing > NONE > SMAP L-BAND RADAR > SMAP L-Band Radar"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Earth Observation Satellites > NASA Decadal Survey > SMAP > Soil Moisture Active and Passive Observatory"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "GEOGRAPHIC REGION > GLOBAL"
- }, "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "NASA/GCMD Earth Science Keywords"
- }, "date" => {
- "gco:nilReason" => "missing"
- }
- }
- }
- }
- }], "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP"
- }
- }
- }, "associationType" => {
- "DS_AssociationTypeCode" => "largerWorkCitation"
- }, "initiativeType" => {
- "DS_InitiativeTypeCode" => "mission"
- }
- }
- }, "language" => {
- "CharacterString" => "eng"
- }, "characterSet" => {
- "MD_CharacterSetCode" => "utf8"
- }, "topicCategory" => {
- "MD_TopicCategoryCode" => "geoscientificInformation"
- }, "environmentDescription" => {
- "CharacterString" => "Data product generated by the SMAP mission in HDF5 format with metadata that conforms to the ISO 19115 model."
- }, "extent" => {
- "EX_Extent" => {
- "description" => {
- "CharacterString" => "Global land excluding inland water and permanent ice."
- }, "geographicElement" => {
- "EX_GeographicBoundingBox" => {
- "extentTypeCode" => {
- "Boolean" => "1"
- }, "westBoundLongitude" => {
- "Decimal" => "-180"
- }, "eastBoundLongitude" => {
- "Decimal" => "180"
- }, "southBoundLatitude" => {
- "Decimal" => "-85.04456"
- }, "northBoundLatitude" => {
- "Decimal" => "85.04456"
- }
- }
- }, "temporalElement" => {
- "EX_TemporalExtent" => {
- "extent" => {
- "TimePeriod" => {
- "gml:id" => "swathTemporalExtent", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
- }
- }
- }
- }
- }
- }
- }
- },
- "18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "Soil Moisture Active Passive Mission Level 4 Surface and Root Zone Soil Moisture (L4_SM) Product Specification Document"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "Date" => "2015-10-31"
- }, "dateType" => {
- "CI_DateTypeCode" => "publication"
- }
- }
- }, "edition" => {
- "CharacterString" => "1.4"
- }, "identifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "L4_SM"
- }, "codeSpace" => {
- "CharacterString" => "http://gmao.gsfc.nasa.gov"
- }, "description" => {
- "CharacterString" => "A short name used by the Soil Moisture Active Passive (SMAP) mission to identify the Level 4 Radar product."
- }
- }
- }, "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DataSetId"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DataSetId"
- }, "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
- }
- }
- }, "associationType" => nil
- }
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "InsertTime"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "InsertTime"
- }, "purpose" => {
- "CharacterString" => "InsertTime"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "UpdateTime"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "UpdateTime"
- }, "purpose" => {
- "CharacterString" => "UpdateTime"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DIFID"
- }, "date" => {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, "identifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DIFID"
- }, "purpose" => {
- "CharacterString" => "DIFID"
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update"
- }, "date" => [{
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-04-29T00:00:00.000Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }], "edition" => {
- "CharacterString" => "Vv2010"
- }, "identifier" => [{
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SPL4SMAU"
- }, "description" => {
- "CharacterString" => "The ECS Short Name"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "002"
- }, "description" => {
- "CharacterString" => "The ECS Version ID"
- }
- }
- }, {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "doi:10.5067/JJY2V0GJNFRZ"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis.umm.doi"
- }, "description" => {
- "CharacterString" => "DOI"
- }
- }
- }], "presentationForm" => {
- "CI_PresentationFormCode" => "documentDigital"
- }, "series" => {
- "CI_Series" => {
- "name" => {
- "CharacterString" => "L4_SM"
- }
- }
- }, "otherCitationDetails" => {
- "CharacterString" => "The first Validated Release of the SMAP Level 4 Science Processing Software."
- }
- }
- }, "abstract" => {
- "CharacterString" => "The SMAP L4_SM data product provides global, 3-hourly surface and root zone soil moisture at 9 km resolution. The L4_SM data product consists of three Collections: geophysical, analysis update and land-model-constants."
- }, "purpose" => {
- "gco:nilReason" => "missing", "CharacterString" => "The SMAP L4_SM data product provides spatially and temporally complete surface and root zone soil moisture information for science and applications users."
- }, "status" => {
- "MD_ProgressCode" => "onGoing"
- }, "pointOfContact" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "PVC"
- }, "role" => {
- "CI_RoleCode" => "distributor"
- }
- }
- }, "descriptiveKeywords" => [{
- "MD_Keywords" => {
- "keyword" => [{
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > NONE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > SURFACE SOIL MOISTURE"
- }, {
- "CharacterString" => "EARTH SCIENCE > LAND SURFACE > SOILS > SOIL MOISTURE/WATER CONTENT > NONE > NONE > ROOT ZONE SOIL MOISTURE"
- }], "type" => {
- "MD_KeywordTypeCode" => "theme"
- }, "thesaurusName" => {
- "gco:nilReason" => "unknown"
- }
- }
- }, {
- "MD_Keywords" => {
- "keyword" => {
- "CharacterString" => "Aircraft > Not provided > Not provided > "
- }
- }
- }], "language" => {
- "CharacterString" => "eng"
- }, "topicCategory" => {
- "MD_TopicCategoryCode" => "geoscientificInformation"
- }, "extent" => {
- "EX_Extent" => {
- "geographicElement" => {
- "EX_GeographicBoundingBox" => {
- "extentTypeCode" => {
- "Boolean" => "1"
- }, "westBoundLongitude" => {
- "Decimal" => "-180.0"
- }, "eastBoundLongitude" => {
- "Decimal" => "180.0"
- }, "southBoundLatitude" => {
- "Decimal" => "-85.04456"
- }, "northBoundLatitude" => {
- "Decimal" => "85.04456"
- }
- }
- }, "temporalElement" => {
- "EX_TemporalExtent" => {
- "extent" => {
- "TimePeriod" => {
- "gml:id" => "dc46625fa-ae1e-4c95-a6ae-b15dd90fe8d3", "beginPosition" => "2015-03-31T01:30:00.000Z", "endPosition" => "2021-01-01T01:29:59.999Z"
- }
- }
- }
- }
- }
- }, "processingLevel" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "Not provided"
- }, "codeSpace" => {
- "CharacterString" => "gov.nasa.esdis.umm.processinglevelid"
- }
- }
- }
- }
- },
- "24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]" => {
- "MD_DataIdentification" => {
- "citation" => {
- "CI_Citation" => {
- "title" => {
- "CharacterString" => "DataSetId"
- }, "date" => [{
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-04-29T00:00:00.000Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-12T11:50:19.050Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "revision"
- }
- }
- }, {
- "CI_Date" => {
- "date" => {
- "DateTime" => "2016-09-08T09:16:24.835Z"
- }, "dateType" => {
- "CI_DateTypeCode" => "creation"
- }
- }
- }], "citedResponsibleParty" => {
- "CI_ResponsibleParty" => {
- "organisationName" => {
- "CharacterString" => "Global Modeling and Assimilation Office"
- }, "role" => {
- "CI_RoleCode" => "originator"
- }
- }
- }
- }
- }, "abstract" => {
- "CharacterString" => "DataSetId"
- }, "resourceFormat" => {
- "MD_Format" => {
- "name" => {
- "CharacterString" => "HDF5"
- }, "version" => {
- "gco:nilReason" => "unknown"
- }
- }
- }, "aggregationInfo" => {
- "MD_AggregateInformation" => {
- "aggregateDataSetIdentifier" => {
- "MD_Identifier" => {
- "code" => {
- "CharacterString" => "SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002"
- }
- }
- }, "associationType" => {
- "DS_AssociationTypeCode" => "largerWorkCitation"
- }
- }
- }, "language" => {
- "CharacterString" => "eng"
- }
- }
- },
- "25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString" => "NSIDC DAAC > National Snow and Ice Data Center DAAC",
- "26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString" => "nsidc@nsidc.org",
- "27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL" => "http://nsidc.org/daac/",
- "28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode" => "pointOfContact",
- "29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href" => "#alaskaSARContact",
- "30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2016-04-29",
- "31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date" => "2013-01-02"
- }
- end
-
- def dif_json_report
- {
- "format" => "application/dif10+xml",
- "1. -: /DIF/Temporal_Coverage/Temporal_Range_Type" => "Long Range",
- "2. -: /DIF/Related_URL[1]" => {
- "URL_Content_Type" => {
- "Type" => "VIEW DATA SET LANDING PAGE"
- }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
- },
- "3. +: /DIF/Related_URL[1]" => {
- "URL_Content_Type" => {
- "Type" => "DATA SET LANDING PAGE"
- }, "URL" => "http://dx.doi.org/10.7927/H4NK3BZJ", "Description" => "data set DOI and homepage"
- }
- }
- end
-
- def echo_json_report
- {
- "format" => "application/echo10+xml",
- "1. -: /Collection/Orderable" => "true",
- "2. -: /Collection/Visible" => "true",
- "3. -: /Collection/MaintenanceAndUpdateFrequency" => "As needed",
- "4. +: /Collection/Temporal/EndsAtPresentFlag" => "false",
- "5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime" => "1970-01-01T00:00:00.000Z",
- "6. +: /Collection/Platforms/Platform/ShortName" => "Not provided",
- "7. +: /Collection/Platforms/Platform/LongName" => "Not provided",
- "8. +: /Collection/Platforms/Platform/Type" => "Not provided",
- "9. -: /Collection/AssociatedDIFs/DIF/EntryId" => "CIESIN_SEDAC_ANTHROMES_v2_1700",
- "10. -: /Collection/InsertTime" => "2014-05-13T00:00:00Z",
- "11. +: /Collection/InsertTime" => "2014-05-13T00:00:00.000Z",
- "12. -: /Collection/LastUpdate" => "2015-08-04T00:00:00Z",
- "13. +: /Collection/LastUpdate" => "2015-08-04T00:00:00.000Z",
- "14. -: /Collection/LongName" => "Anthropogenic Biomes of the World, Version 2: 1700",
- "15. +: /Collection/LongName" => "Not provided",
- "16. -: /Collection/CollectionState" => "Final",
- "17. +: /Collection/CollectionState" => "NOT PROVIDED",
- "18. -: /Collection/Price" => "0",
- "19. +: /Collection/Price" => " 0.00",
- "20. -: /Collection/SpatialKeywords/Keyword[0]" => "Africa",
- "21. -: /Collection/SpatialKeywords/Keyword[1]" => "Asia",
- "22. +: /Collection/SpatialKeywords/Keyword[0]" => "AFRICA",
- "23. +: /Collection/SpatialKeywords/Keyword[1]" => "GAZA STRIP",
- "24. -: /Collection/Contacts/Contact[0]" => {
- "Role" => "Archive", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
- "Address" => {
- "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
- }
- }, "OrganizationPhones" => {
- "Phone" => [{
- "Number" => "+1 845-365-8920",
- "Type" => "Telephone"
- }, {
- "Number" => "+1 845-365-8922",
- "Type" => "Fax"
- }]
- }, "OrganizationEmails" => {
- "Email" => "ciesin.info@ciesin.columbia.edu"
- }, "ContactPersons" => {
- "ContactPerson" => {
- "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services"
- }
- }
- },
- "25. +: /Collection/Contacts/Contact[0]" => {
- "Role" => "PROCESSOR", "OrganizationName" => "SEDAC"
- },
- "26. +: /Collection/Contacts/Contact[1]" => {
- "Role" => "ARCHIVER", "OrganizationName" => "SEDAC"
- },
- "27. +: /Collection/Contacts/Contact[2]" => {
- "Role" => "ARCHIVER", "HoursOfService" => "9:00 A.M. to 5:00 P.M., Monday to Friday", "OrganizationName" => "Socioeconomic Data and Applications Center (SEDAC)", "OrganizationAddresses" => {
- "Address" => {
- "StreetAddress" => "CIESIN, Columbia University, 61 Route 9W, P.O. Box 1000", "City" => "Palisades", "StateProvince" => "NY", "PostalCode" => "10964", "Country" => "USA"
- }
- }, "OrganizationPhones" => {
- "Phone" => [{
- "Number" => "+1 845-365-8920",
- "Type" => "Telephone"
- }, {
- "Number" => "+1 845-365-8922",
- "Type" => "Fax"
- }]
- }, "OrganizationEmails" => {
- "Email" => "ciesin.info@ciesin.columbia.edu"
- }, "ContactPersons" => {
- "ContactPerson" => {
- "FirstName" => "SEDAC", "MiddleName" => "User", "LastName" => "Services", "JobPosition" => "TECHNICAL CONTACT"
- }
- }
- },
- "28. -: /Collection/SpatialInfo/SpatialCoverageType" => "Horizontal",
- "29. +: /Collection/SpatialInfo/SpatialCoverageType" => "HORIZONTAL",
- "30. -: /Collection/OnlineResources/OnlineResource/Type" => "DOI URL",
- "31. +: /Collection/OnlineResources/OnlineResource/Type" => "CollectionURL : DATA SET LANDING PAGE",
- "32. -: /Collection/Spatial/SpatialCoverageType" => "Horizontal",
- "33. +: /Collection/Spatial/SpatialCoverageType" => "HORIZONTAL",
- "34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.000000",
- "35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate" => "-180.0",
- "36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.000000",
- "37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate" => "90.0",
- "38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.000000",
- "39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate" => "180.0",
- "40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.000000",
- "41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate" => "-90.0"
- }
- end
-
- def iso_text_report
- 'application/iso:smap+xml
-
- 1. -: /DS_Series/schemaLocation
- 2. -: /DS_Series/seriesMetadata/MI_Metadata/fileIdentifier/FileName
- 3. -: /DS_Series/seriesMetadata/MI_Metadata/characterSet/MD_CharacterSetCode
- 4. -: /DS_Series/seriesMetadata/MI_Metadata/hierarchyLevel/MD_ScopeCode
- 5. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/attributeDescription
- 6. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/contentType
- 7. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/code/CharacterString
- 8. +: /DS_Series/seriesMetadata/MI_Metadata/contentInfo/MD_ImageDescription/processingLevelCode/MD_Identifier/codeSpace/CharacterString
- 9. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardName/CharacterString
- 10. +: /DS_Series/seriesMetadata/MI_Metadata/dataQualityInfo/DQ_DataQuality/scope/DQ_Scope/level/MD_ScopeCode
- 11. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/id
- 12. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/code/CharacterString
- 13. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/identifier/MD_Identifier/codeSpace/CharacterString
- 14. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/description/CharacterString
- 15. +: /DS_Series/seriesMetadata/MI_Metadata/acquisitionInformation/MI_AcquisitionInformation/platform/EOS_Platform/instrument/nilReason
- 16. -: /DS_Series/seriesMetadata/MI_Metadata/metadataStandardVersion/CharacterString
- 17. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
- 18. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
- 19. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[2]
- 20. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[3]
- 21. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[4]
- 22. -: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[5]
- 23. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[0]
- 24. +: /DS_Series/seriesMetadata/MI_Metadata/identificationInfo[1]
- 25. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/organisationName/CharacterString
- 26. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString
- 27. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL
- 28. -: /DS_Series/seriesMetadata/MI_Metadata/contact/CI_ResponsibleParty/role/CI_RoleCode
- 29. +: /DS_Series/seriesMetadata/MI_Metadata/contact/href
- 30. -: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date
- 31. +: /DS_Series/seriesMetadata/MI_Metadata/dateStamp/Date'
- end
-
- def echo_text_report
- 'application/echo10+xml
-
- 1. -: /Collection/Orderable
- 2. -: /Collection/Visible
- 3. -: /Collection/MaintenanceAndUpdateFrequency
- 4. +: /Collection/Temporal/EndsAtPresentFlag
- 5. +: /Collection/Temporal/RangeDateTime/BeginningDateTime
- 6. +: /Collection/Platforms/Platform/ShortName
- 7. +: /Collection/Platforms/Platform/LongName
- 8. +: /Collection/Platforms/Platform/Type
- 9. -: /Collection/AssociatedDIFs/DIF/EntryId
- 10. -: /Collection/InsertTime
- 11. +: /Collection/InsertTime
- 12. -: /Collection/LastUpdate
- 13. +: /Collection/LastUpdate
- 14. -: /Collection/LongName
- 15. +: /Collection/LongName
- 16. -: /Collection/CollectionState
- 17. +: /Collection/CollectionState
- 18. -: /Collection/Price
- 19. +: /Collection/Price
- 20. -: /Collection/SpatialKeywords/Keyword[0]
- 21. -: /Collection/SpatialKeywords/Keyword[1]
- 22. +: /Collection/SpatialKeywords/Keyword[0]
- 23. +: /Collection/SpatialKeywords/Keyword[1]
- 24. -: /Collection/Contacts/Contact[0]
- 25. +: /Collection/Contacts/Contact[0]
- 26. +: /Collection/Contacts/Contact[1]
- 27. +: /Collection/Contacts/Contact[2]
- 28. -: /Collection/SpatialInfo/SpatialCoverageType
- 29. +: /Collection/SpatialInfo/SpatialCoverageType
- 30. -: /Collection/OnlineResources/OnlineResource/Type
- 31. +: /Collection/OnlineResources/OnlineResource/Type
- 32. -: /Collection/Spatial/SpatialCoverageType
- 33. +: /Collection/Spatial/SpatialCoverageType
- 34. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
- 35. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/WestBoundingCoordinate
- 36. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
- 37. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/NorthBoundingCoordinate
- 38. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
- 39. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/EastBoundingCoordinate
- 40. -: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate
- 41. +: /Collection/Spatial/HorizontalSpatialDomain/Geometry/BoundingRectangle/SouthBoundingCoordinate'
- end
-
- def dif_text_report
- 'application/dif10+xml
-
- 1. -: /DIF/Temporal_Coverage/Temporal_Range_Type
- 2. -: /DIF/Related_URL[1]
- 3. +: /DIF/Related_URL[1]'
- end
- end
-end
From 38b6d8f9f678581a1be26be8812d36cf83f3f5a6 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Thu, 6 Aug 2020 12:49:50 -0400
Subject: [PATCH 41/49] MMT-2313: fixed fixture file paths
---
spec/features/collections/loss_report_spec.rb | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/spec/features/collections/loss_report_spec.rb b/spec/features/collections/loss_report_spec.rb
index e240add59..fa179e12a 100644
--- a/spec/features/collections/loss_report_spec.rb
+++ b/spec/features/collections/loss_report_spec.rb
@@ -1,7 +1,7 @@
describe 'Displaying the loss report in browser' do
context 'when accessing the loss report' do
-
+
let(:echo_concept_id) { echo_concept_id = cmr_client.get_collections({'EntryTitle': 'Anthropogenic Biomes of the World, Version 2: 1700'}).body.dig('items',0,'meta','concept-id') }
let(:dif_concept_id) { dif_concept_id = cmr_client.get_collections({'EntryTitle': '2000 Pilot Environmental Sustainability Index (ESI)'}).body.dig('items',0,'meta','concept-id') }
let(:iso_concept_id) { iso_concept_id = cmr_client.get_collections({'EntryTitle': 'SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil Moisture Analysis Update V002'}).body.dig('items',0,'meta','concept-id') }
@@ -13,18 +13,18 @@
context 'when displaying json' do
it 'properly displays the echo json report' do
visit loss_report_collections_path(echo_concept_id, format:'json')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_echo_sample.json').gsub(/\s+/, ""))
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_echo_sample.json').gsub(/\s+/, ""))
end
it 'properly displays the dif json report' do
visit loss_report_collections_path(dif_concept_id, format:'json')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_dif_sample.json').gsub(/\s+/, ""))
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_dif_sample.json').gsub(/\s+/, ""))
end
it 'properly displays the iso json report' do
visit loss_report_collections_path(iso_concept_id, format:'json')
# the reason this iso example has to be split is that cmr adds/updates a couple 'id' attributes
        # in the actual collection (every time it is translated) and therefore the comparison report will always include these changes
# except with a different value for the 'id' attribute. In order to bypass this issue we ignore the 'id' changes by using them as #split delimiters
- string_part = File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_iso_sample.json').gsub(/\s+/, "").split(/dc714eaf5-01b3-4705-9031-f35c87e98529|dd8cd38ba-0984-4af1-9a10-b6e303388cc4/)
+ string_part = File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.json').gsub(/\s+/, "").split(/dc714eaf5-01b3-4705-9031-f35c87e98529|dd8cd38ba-0984-4af1-9a10-b6e303388cc4/)
expect(page.text.gsub(/\s+/, "")).to have_text(string_part[0])
expect(page.text.gsub(/\s+/, "")).to have_text(string_part[1])
expect(page.text.gsub(/\s+/, "")).to have_text(string_part[2])
@@ -35,15 +35,15 @@
it 'properly displays the echo text report' do
visit loss_report_collections_path(echo_concept_id, format:'text')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_echo_sample.text').gsub(/\s+/, ""))
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_echo_sample.text').gsub(/\s+/, ""))
end
it 'properly displays the dif text report' do
visit loss_report_collections_path(dif_concept_id, format:'text')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_dif_sample.text').gsub(/\s+/, ""))
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_dif_sample.text').gsub(/\s+/, ""))
end
it 'properly displays the iso text report' do
visit loss_report_collections_path(iso_concept_id, format:'text')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('/Users/ctrummer/mmt/spec/fixtures/loss_report_samples/loss_report_iso_sample.text').gsub(/\s+/, ""))
+ expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.text').gsub(/\s+/, ""))
end
end
end
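The path fix in this patch swaps developer-machine absolute paths for repo-relative ones, which work because RSpec runs from the application root. Anchoring on Rails.root makes that assumption explicit and keeps working if the suite is launched from elsewhere; a small hypothetical wrapper (the helper name is illustrative, not part of the patch):

    # Resolve a loss-report fixture against the application root so the
    # read succeeds regardless of the process's working directory.
    def loss_report_fixture(name)
      File.read(Rails.root.join('spec', 'fixtures', 'loss_report_samples', name))
    end

    # e.g. loss_report_fixture('loss_report_echo_sample.text')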
From cd66642585297012cc3d75df57853aa3f8ae9cd4 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Thu, 6 Aug 2020 13:24:26 -0400
Subject: [PATCH 42/49] MMT-2313: removed unnecessary ternary operator
---
app/helpers/loss_report_helper.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index af09337dc..d678b22c5 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -75,7 +75,7 @@ def loss_report_output(compared_collections, hide_items: true, display: 'text')
def is_xml?(element)
# checks if the element being passed is xml
# may be beneficial to add more checks
- element.include?('<' && '' && '>') ? true : false
+ element.include?('<' && '' && '>')
end
def is_attribute?(element)
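A note on the Ruby semantics behind the expression simplified above: && between two truthy operands returns the right-hand operand, so '<' && '' && '>' evaluates to just '>', and the whole check reduces to element.include?('>'). String#include? accepts a single substring, so testing for both angle brackets takes two calls; a stricter version might look like this (a sketch, not the project's code):

    def is_xml?(element)
      # include? takes one substring at a time, so test each angle
      # bracket explicitly instead of chaining && inside the call.
      element.include?('<') && element.include?('>')
    end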
From bb7e6bfbdc9e1bd144d53634e5c12a8f97be1ede Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 10 Aug 2020 09:30:49 -0400
Subject: [PATCH 43/49] MMT-2313: addressed more PR change requests
---
app/controllers/collections_controller.rb | 16 +++++++++------
app/helpers/loss_report_helper.rb | 24 +++++++++++++----------
2 files changed, 24 insertions(+), 16 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 948abded7..ad3469655 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -5,7 +5,6 @@ class CollectionsController < ManageCollectionsController
include LossReportHelper
before_action :set_collection
- before_action :prepare_translated_collections
before_action :ensure_correct_collection_provider, only: [:edit, :clone, :revert, :destroy]
layout 'collection_preview', only: [:show]
@@ -125,8 +124,13 @@ def loss_report
# is false when the cmr calls aren't successful.
compared_collections = prepare_translated_collections
respond_to do |format|
- format.text { render plain: JSON.pretty_generate(@collection) + (compared_collections ? loss_report_output(compared_collections, hide_items: true, display: 'text') : 'Failure to get_concept or translate_collection' )}
- format.json { render json: JSON.pretty_generate(compared_collections ? loss_report_output(compared_collections, hide_items: false, display: 'json') : {"error"=>"Failure to get_concept or translate_collection"}) }
+ if compared_collections[:error]
+ format.text { render plain: compared_collections[:error] }
+ format.json { render json: JSON.pretty_generate(compared_collections) }
+ else
+ format.text { render plain: loss_report_output(compared_collections: compared_collections, hide_items: true, display: 'text') }
+ format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, hide_items: false, display: 'json')) }
+ end
end
end
@@ -144,13 +148,13 @@ def ensure_correct_collection_provider
def prepare_translated_collections
original_collection_native_xml = cmr_client.get_concept(params[:id],token, {})
- return false if !original_collection_native_xml.success?
+ return { error: 'Failed to retrieve collection from CMR' } unless original_collection_native_xml.success?
content_type = original_collection_native_xml.headers.fetch('content-type').split(';')[0]
- return false if content_type.include?('application/vnd.nasa.cmr.umm+json;version=')
+ return { error: 'This collection is already in UMM format so there is no loss report' } if content_type.include?('application/vnd.nasa.cmr.umm+json')
translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", content_type, skip_validation=true)
- return false if !translated_collection_native_xml.success?
+ return { error: 'Failed to translate collection from UMM back to native format' } unless translated_collection_native_xml.success?
return {
original_collection_native_xml: original_collection_native_xml.body,
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index d678b22c5..0f07f412f 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,6 +1,6 @@
module LossReportHelper
- def loss_report_output(compared_collections, hide_items: true, display: 'text')
+ def loss_report_output(compared_collections: compared_collections, hide_items: true, display: 'text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
# this display feature could be a good candidate for dependency injection
@@ -19,13 +19,14 @@ def loss_report_output(compared_collections, hide_items: true, display: 'text')
conv = Nokogiri::XML(compared_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }
end
- arr_paths = Array.new # This array is used to keep track of the paths that lead to arrays that have already been mapped
+ # This array is used to keep track of the paths that lead to arrays that have already been mapped
+ arr_paths = Array.new
if display == 'text'
- text_output = String.new
+ text_output = String.new and json_output = nil
text_output += (compared_collections[:native_format] + "\n\n")
elsif display == 'json'
- json_output = Hash.new
+ json_output = Hash.new and text_output = nil
json_output['format'] = compared_collections[:native_format]
end
@@ -45,10 +46,10 @@ def loss_report_output(compared_collections, hide_items: true, display: 'text')
elsif path_not_checked?(path, arr_paths)
# this layer of if/else separates items that contain xml (this is a nokogiri oddity that occurs where
# Nokogiri does not directly map to an item that is changed, so the item still contains xml - this is the
- # purpose of hash_map), items that represent xml attribute changes, and normal changes.
+ # purpose of hash_values_and_paths), items that represent xml attribute changes, and normal changes.
if is_xml?(element)
element = Hash.from_xml(element)
- hash_map(element).each do |item|
+ hash_values_and_paths(element).each do |item|
arr_path = top_level_arr_path("#{path}/#{item['path']}", orig_h, conv_h)
# this layer of if/else structure is used to separately evaluate implicit array changes in the xml.
# This is why arr_path will evaluate true if the element in question is an array
@@ -75,7 +76,7 @@ def loss_report_output(compared_collections, hide_items: true, display: 'text')
def is_xml?(element)
# checks if the element being passed is xml
# may be beneficial to add more checks
- element.include?('<' && '' && '>')
+ element.include?('<' && '' && '>')
end
def is_attribute?(element)
@@ -103,7 +104,10 @@ def top_level_arr_path(path, orig_h, conv_h)
pre_translation_array, pre_translation_path = hash_navigation(path, orig_h)
post_translation_array, post_translation_path = hash_navigation(path, conv_h)
- return false if pre_translation_array == false && post_translation_array == false
+ # the following line handles a scenario where hash_navigation returns false for both pre_ and post_translation_arrays
+ # which means that the path passed does not exist in the original or converted collections
+ return path_exists = false if pre_translation_array == false && post_translation_array == false
+
return pre_translation_path if pre_translation_array.is_a?(Array)
return post_translation_path if post_translation_array.is_a?(Array)
@@ -128,10 +132,10 @@ def add_to_report(change, element, path, hide_items, display, json_output, text_
return json_output["#{@counter}. #{change}: #{path}"] = element if display == 'json'
end
- def hash_map(hash)
+ def hash_values_and_paths(hash)
buckets = Array.new
hash.each do |key,val|
- if val.is_a? Hash then hash_map(val).each do |item|
+ if val.is_a? Hash then hash_values_and_paths(val).each do |item|
item['path'] = key + '/' + item['path']
buckets << item end
else
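hash_values_and_paths (the renamed hash_map) recurses through a nested hash and pairs each leaf value with the slash-delimited path that reaches it. A self-contained sketch of the same recursion (standalone, not the patched helper itself):

    # Sketch: flatten a nested hash into path/value pairs, mirroring the
    # recursion in hash_values_and_paths.
    def values_and_paths(hash)
      hash.flat_map do |key, val|
        if val.is_a?(Hash)
          values_and_paths(val).map do |item|
            { 'path' => "#{key}/#{item['path']}", 'value' => item['value'] }
          end
        else
          [{ 'path' => key.to_s, 'value' => val }]
        end
      end
    end

    values_and_paths('a' => { 'b' => 1, 'c' => { 'd' => 2 } })
    # => [{"path"=>"a/b", "value"=>1}, {"path"=>"a/c/d", "value"=>2}]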
From 573a15129cbf0d474f18a8ee0de635993000eacc Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Mon, 10 Aug 2020 15:55:10 -0400
Subject: [PATCH 44/49] MMT-2313: adjusted keyword arguments and a variable
definition
---
app/controllers/collections_controller.rb | 2 +-
app/helpers/loss_report_helper.rb | 8 +++++---
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index ad3469655..2127d7f60 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -129,7 +129,7 @@ def loss_report
format.json { render json: JSON.pretty_generate(compared_collections) }
else
format.text { render plain: loss_report_output(compared_collections: compared_collections, hide_items: true, display: 'text') }
- format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, hide_items: false, display: 'json')) }
+ format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, display: 'json')) }
end
end
end
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 0f07f412f..7bc482f62 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,6 +1,6 @@
module LossReportHelper
- def loss_report_output(compared_collections: compared_collections, hide_items: true, display: 'text')
+ def loss_report_output(compared_collections:, hide_items: true, display: 'text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
# this display feature could be a good candidate for dependency injection
@@ -23,10 +23,12 @@ def loss_report_output(compared_collections: compared_collections, hide_items: t
arr_paths = Array.new
if display == 'text'
- text_output = String.new and json_output = nil
+ text_output = String.new
+ json_output = nil
text_output += (compared_collections[:native_format] + "\n\n")
elsif display == 'json'
- json_output = Hash.new and text_output = nil
+ json_output = Hash.new
+ text_output = nil
json_output['format'] = compared_collections[:native_format]
end
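Splitting text_output = String.new and json_output = nil into two statements is more than style: and binds more loosely than =, so the one-liner parses as (text_output = String.new) and (json_output = nil), and the second assignment only runs because String.new is truthy. The keyword-argument fix matters for a similar reason: compared_collections: compared_collections is a self-referential default (some Ruby versions flag it as a 'circular argument reference'), while a bare compared_collections: makes the argument required. A small illustration (names are illustrative only):

    # 'and' binds looser than '=': the second assignment runs only when the
    # first value is truthy (String.new always is, which hid the hazard).
    text_output = String.new and json_output = nil
    # parses as: (text_output = String.new) and (json_output = nil)

    # A required keyword argument fails loudly when omitted:
    def loss_report_output(compared_collections:, hide_items: true)
      [compared_collections, hide_items]
    end

    loss_report_output(compared_collections: {})  # => [{}, true]
    # loss_report_output                          # ArgumentError: missing keyword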
From 2b18ad08f5e416629751060a609834a287c00fe1 Mon Sep 17 00:00:00 2001
From: ryanmiller-1 <40173609+ryanmiller-1@users.noreply.github.com>
Date: Mon, 10 Aug 2020 16:45:37 -0400
Subject: [PATCH 45/49] MMT-2350 adding a wait for indexing after deleting a
provider in reset_provider (#623)
---
lib/test_cmr/load_data.rb | 2 ++
1 file changed, 2 insertions(+)
diff --git a/lib/test_cmr/load_data.rb b/lib/test_cmr/load_data.rb
index be1775de0..46fd53fbf 100644
--- a/lib/test_cmr/load_data.rb
+++ b/lib/test_cmr/load_data.rb
@@ -678,6 +678,8 @@ def reset_provider(provider_id)
end
guid = "prov-guid-#{Time.now.to_i}"
+ # Wait for the cascade delete to finish or else we may create races in CMR
+ wait_for_indexing
# Recreate provider in Ingest
resp = connection.post do |req|
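wait_for_indexing here gives CMR time to finish the cascade delete before the provider is recreated. The general shape of that guard, as a sketch (the polling helper and predicate are hypothetical; wait_for_indexing in lib/test_cmr/load_data.rb is the project's actual implementation):

    require 'timeout'

    # Sketch: poll a readiness check instead of racing an asynchronous delete.
    def wait_until(timeout: 30, interval: 0.5)
      Timeout.timeout(timeout) do
        sleep(interval) until yield
      end
    end

    # wait_until { provider_fully_deleted?(provider_id) }  # hypothetical predicate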
From 35c022fd010d8eab4f52d6534a6b48523a572ccf Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 11 Aug 2020 09:53:59 -0400
Subject: [PATCH 46/49] MMT-2313: changed use of hide_items parameter
---
app/controllers/collections_controller.rb | 19 ++++++++++++++++---
app/helpers/loss_report_helper.rb | 2 +-
2 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 2127d7f60..1f9ed55b3 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -128,8 +128,8 @@ def loss_report
format.text { render plain: compared_collections[:error] }
format.json { render json: JSON.pretty_generate(compared_collections) }
else
- format.text { render plain: loss_report_output(compared_collections: compared_collections, hide_items: true, display: 'text') }
- format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, display: 'json')) }
+ format.text { render plain: loss_report_output(compared_collections: compared_collections, hide_items: compared_collections[:hide_items], display: 'text') }
+ format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, hide_items: compared_collections[:hide_items], display: 'json')) }
end
end
end
@@ -156,12 +156,25 @@ def prepare_translated_collections
translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", content_type, skip_validation=true)
return { error: 'Failed to translate collection from UMM back to native format' } unless translated_collection_native_xml.success?
+ # this checks the 'hide_items' url parameter that can be manually added. Its primary use is for developers
+ # who need to debug using the text_output
+ hide_items = if params[:hide_items].nil? || params[:hide_items].downcase == 'true'
+ true
+ elsif params[:hide_items].downcase == 'false'
+ false
+ else
+ 'error'
+ end
+
+ return { error: 'Unknown value for the hide_items parameter. The format should be: ".../loss_report.text?hide_items=true" or ".../loss_report.text?hide_items=false"' } if hide_items == 'error'
+
return {
original_collection_native_xml: original_collection_native_xml.body,
translated_collection_native_xml: translated_collection_native_xml.body,
original_collection_native_hash: Hash.from_xml(original_collection_native_xml.body),
translated_collection_native_hash: Hash.from_xml(translated_collection_native_xml.body),
- native_format: content_type
+ native_format: content_type,
+ hide_items: hide_items
}
end
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index 7bc482f62..f74408793 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,6 +1,6 @@
module LossReportHelper
- def loss_report_output(compared_collections:, hide_items: true, display: 'text')
+ def loss_report_output(compared_collections:, hide_items:, display: 'text')
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
# this display feature could be a good candidate for dependency injection
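The three-way parse above maps a missing parameter or 'true' to true, 'false' to false, and anything else to an error sentinel. The same logic reads a little tighter as a tri-state helper that returns nil for bad input and leaves error reporting to the caller; a sketch (the helper name is hypothetical):

    # Sketch: strict tri-state parse of a boolean query parameter.
    def parse_hide_items(raw)
      return true if raw.nil?
      case raw.downcase
      when 'true'  then true
      when 'false' then false
      end  # unrecognized input falls through to nil
    end

    parse_hide_items(nil)      # => true (the default)
    parse_hide_items('FALSE')  # => false
    parse_hide_items('maybe')  # => nil, reported as an error by the caller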
From dda75e59f0a42326b9b6954b62e6367c75ecc60b Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 11 Aug 2020 10:10:38 -0400
Subject: [PATCH 47/49] MMT-2313: made :display non-default
---
app/helpers/loss_report_helper.rb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index f74408793..bb62b3939 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,6 +1,6 @@
module LossReportHelper
- def loss_report_output(compared_collections:, hide_items:, display: 'text')
+ def loss_report_output(compared_collections:, hide_items:, display:)
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
# this display feature could be a good candidate for dependency injection
From bde318dec1dad3485fd9cec7f8f0f9f1bf8289d4 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 11 Aug 2020 11:54:15 -0400
Subject: [PATCH 48/49] MMT-2313: completed change requests
---
app/controllers/collections_controller.rb | 42 +++++++++++------------
app/helpers/loss_report_helper.rb | 20 +++++------
2 files changed, 30 insertions(+), 32 deletions(-)
diff --git a/app/controllers/collections_controller.rb b/app/controllers/collections_controller.rb
index 1f9ed55b3..379c13641 100644
--- a/app/controllers/collections_controller.rb
+++ b/app/controllers/collections_controller.rb
@@ -120,16 +120,27 @@ def loss_report
# When a user wants to use MMT to edit metadata that currently exists in a non-UMM form,
# it's important that they're able to see if any data loss occurs in the translation to umm.
# This method is needed to reference the appropriate helper and view for the lossiness report.
- # If compared_collections is false, the error message will appear. Note that compared_collections
- # is false when the cmr calls aren't successful.
- compared_collections = prepare_translated_collections
+ # If translated_collections contains an :error field, the error message will appear.
+
+ # this checks the 'hide_items' url parameter that can be manually added. Its primary use is for developers
+ # who need to debug using the text_output
+ if params[:hide_items].nil? || params[:hide_items].downcase == 'true'
+ hide_items = true
+ elsif params[:hide_items].downcase == 'false'
+ hide_items = false
+ else
+ translated_collections = { error: 'Unknown value for the hide_items parameter. The format should be: ".../loss_report.text?hide_items=true" or ".../loss_report.text?hide_items=false"' }
+ end
+
+ translated_collections ||= prepare_translated_collections
+
respond_to do |format|
- if compared_collections[:error]
- format.text { render plain: compared_collections[:error] }
- format.json { render json: JSON.pretty_generate(compared_collections) }
+ if translated_collections[:error]
+ format.text { render plain: translated_collections[:error] }
+ format.json { render json: JSON.pretty_generate(translated_collections) }
else
- format.text { render plain: loss_report_output(compared_collections: compared_collections, hide_items: compared_collections[:hide_items], display: 'text') }
- format.json { render json: JSON.pretty_generate(loss_report_output(compared_collections: compared_collections, hide_items: compared_collections[:hide_items], display: 'json')) }
+ format.text { render plain: loss_report_output(translated_collections: translated_collections, hide_items: hide_items, display: 'text') }
+ format.json { render json: JSON.pretty_generate(loss_report_output(translated_collections: translated_collections, hide_items: hide_items, display: 'json')) }
end
end
end
@@ -156,25 +167,12 @@ def prepare_translated_collections
translated_collection_native_xml = cmr_client.translate_collection(JSON.pretty_generate(@collection), "application/#{Rails.configuration.umm_c_version}; charset=utf-8", content_type, skip_validation=true)
return { error: 'Failed to translate collection from UMM back to native format' } unless translated_collection_native_xml.success?
- # this checks the 'hide_items' url parameter that can be manually added. Its primary use is for developers
- # who need to debug using the text_output
- hide_items = if params[:hide_items].nil? || params[:hide_items].downcase == 'true'
- true
- elsif params[:hide_items].downcase == 'false'
- false
- else
- 'error'
- end
-
- return { error: 'Unknown value for the hide_items parameter. The format should be: ".../loss_report.text?hide_items=true" or ".../loss_report.text?hide_items=false"' } if hide_items == 'error'
-
return {
original_collection_native_xml: original_collection_native_xml.body,
translated_collection_native_xml: translated_collection_native_xml.body,
original_collection_native_hash: Hash.from_xml(original_collection_native_xml.body),
translated_collection_native_hash: Hash.from_xml(translated_collection_native_xml.body),
- native_format: content_type,
- hide_items: hide_items
+ native_format: content_type
}
end
diff --git a/app/helpers/loss_report_helper.rb b/app/helpers/loss_report_helper.rb
index bb62b3939..7819fc4f5 100644
--- a/app/helpers/loss_report_helper.rb
+++ b/app/helpers/loss_report_helper.rb
@@ -1,22 +1,22 @@
module LossReportHelper
- def loss_report_output(compared_collections:, hide_items:, display:)
+ def loss_report_output(translated_collections:, hide_items:, display:)
# depending on the input selection (json or text) a comparison string/hash is created and displayed in-browser
# this display feature could be a good candidate for dependency injection
- orig_h = compared_collections[:original_collection_native_hash]
- conv_h = compared_collections[:translated_collection_native_hash]
+ orig_h = translated_collections[:original_collection_native_hash]
+ conv_h = translated_collections[:translated_collection_native_hash]
# ISO and DIF collections (in XML form) contain namespaces that cause errors in the below comparison.
# Specifically, when nodes are evaluated individually, (their namespace definitions remaining at the top of the xml)
# their prefixes are undefined in the scope of the evaluation and therefore raise errors. Removing the namespaces
# eliminates this issue.
- if compared_collections[:native_format].include?('iso') || compared_collections[:native_format].include?('dif')
- orig = Nokogiri::XML(compared_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
- conv = Nokogiri::XML(compared_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
+ if translated_collections[:native_format].include?('iso') || translated_collections[:native_format].include?('dif')
+ orig = Nokogiri::XML(translated_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
+ conv = Nokogiri::XML(translated_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }.remove_namespaces!
else
- orig = Nokogiri::XML(compared_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }
- conv = Nokogiri::XML(compared_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }
+ orig = Nokogiri::XML(translated_collections[:original_collection_native_xml]) { |config| config.strict.noblanks }
+ conv = Nokogiri::XML(translated_collections[:translated_collection_native_xml]) { |config| config.strict.noblanks }
end
# This array is used to keep track of the paths that lead to arrays that have already been mapped
@@ -25,11 +25,11 @@ def loss_report_output(compared_collections:, hide_items:, display:)
if display == 'text'
text_output = String.new
json_output = nil
- text_output += (compared_collections[:native_format] + "\n\n")
+ text_output += (translated_collections[:native_format] + "\n\n")
elsif display == 'json'
json_output = Hash.new
text_output = nil
- json_output['format'] = compared_collections[:native_format]
+ json_output['format'] = translated_collections[:native_format]
end
# Below is the Nokogiri#diff method that is used to compare Nokogiri::XML objects.
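The control flow in #loss_report now leans on a Ruby scoping detail: a local assigned in only one branch of an if/elsif/else is still defined (as nil) on the other branches, so translated_collections ||= prepare_translated_collections performs the expensive CMR round-trip only when validation passed. A condensed sketch of that flow (method name and hash contents are illustrative):

    # Sketch: validate first, then fall back to the expensive call via ||=.
    def report_for(param)
      if param.nil? || param.casecmp?('true')
        hide_items = true
      elsif param.casecmp?('false')
        hide_items = false
      else
        report = { error: 'Unknown value for the hide_items parameter' }
      end

      # 'report' is nil here unless the error branch ran.
      report ||= { hide_items: hide_items }  # stands in for prepare_translated_collections
      report
    end

    report_for('TRUE')   # => { hide_items: true }
    report_for('maybe')  # => { error: 'Unknown value for the hide_items parameter' }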
From cd12de0d162074ecd29219aa6eef9d5f1eec3cc0 Mon Sep 17 00:00:00 2001
From: Christian Trummer
Date: Tue, 11 Aug 2020 16:46:45 -0400
Subject: [PATCH 49/49] MMT-2313: adjusted spec to be more universal
---
spec/features/collections/loss_report_spec.rb | 33 ++++++++++++++-----
1 file changed, 25 insertions(+), 8 deletions(-)
diff --git a/spec/features/collections/loss_report_spec.rb b/spec/features/collections/loss_report_spec.rb
index fa179e12a..2439a065b 100644
--- a/spec/features/collections/loss_report_spec.rb
+++ b/spec/features/collections/loss_report_spec.rb
@@ -19,15 +19,24 @@
visit loss_report_collections_path(dif_concept_id, format:'json')
expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_dif_sample.json').gsub(/\s+/, ""))
end
+
it 'properly displays the iso json report' do
visit loss_report_collections_path(iso_concept_id, format:'json')
- # the reason this iso example has to be split is that cmr adds/updates a couple 'id' attributes
- # in the actual collection (every time it is translated) and therefore the comparison report will always include these changes
- # except with a different value for the 'id' attribute. In order to bypass this issue we ignore the 'id' changes by using them as #split delimiters
- string_part = File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.json').gsub(/\s+/, "").split(/dc714eaf5-01b3-4705-9031-f35c87e98529|dd8cd38ba-0984-4af1-9a10-b6e303388cc4/)
- expect(page.text.gsub(/\s+/, "")).to have_text(string_part[0])
- expect(page.text.gsub(/\s+/, "")).to have_text(string_part[1])
- expect(page.text.gsub(/\s+/, "")).to have_text(string_part[2])
+ sample_paths = JSON.parse(File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.json')).keys.map! { |path| path.split(': ').last }
+ sample_values = JSON.parse(File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.json')).values
+ page_paths = JSON.parse(page.text).keys.map! { |path| path.split(': ').last }
+ page_values = JSON.parse(page.text).values
+
+ # the reason this iso example will have 2 discrepancies (seen in the bottom two 'expect' lines) is that
+ # every time an iso collection is translated a few 'id' attributes are generated by CMR and they are always different.
+ # This means that the sample reports will contain different 'id' values and therefore cannot be compared directly in this example.
+ # In order to bypass this issue we ignore the 'id' changes by expecting two 'id' values that are different, hence the
+ # '2' expectation.
+
+ expect(sample_paths - page_paths).to be_empty
+ expect(page_paths - sample_paths).to be_empty
+ expect((sample_values - page_values).length).to be(2)
+ expect((page_values - sample_values).length).to be(2)
end
end
@@ -43,7 +52,15 @@
end
it 'properly displays the iso text report' do
visit loss_report_collections_path(iso_concept_id, format:'text')
- expect(page.text.gsub(/\s+/, "")).to have_text(File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.text').gsub(/\s+/, ""))
+
+ # the following two lines extract all the paths from the Capybara page and from the sample report.
+ # From there, the two arrays of paths are compared to ensure the page does not hold different paths than the sample.
+ # This is necessary because of how CMR translates ISO records. See the 'json iso report' example above for more details.
+ sample_paths = File.read('spec/fixtures/loss_report_samples/loss_report_iso_sample.text').split(/\s|\n/).reject! { |path| !path.include?("/") }
+ page_paths = page.text.split("\s").reject! { |path| !path.include?("/") }
+
+ expect(sample_paths - page_paths).to be_empty
+ expect(page_paths - sample_paths).to be_empty
end
end
end
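The closing assertions compare path sets exactly and tolerate a known number of value-level discrepancies (the two CMR-generated 'id's). Read as a reusable check, the idea looks like this sketch (the method name and the expected_volatile default are hypothetical):

    # Sketch: two parsed loss reports match when their paths agree exactly
    # and their values differ in exactly the expected number of positions.
    def reports_match?(sample, actual, expected_volatile: 2)
      sample_paths = sample.keys.map { |key| key.split(': ').last }
      actual_paths = actual.keys.map { |key| key.split(': ').last }
      return false unless (sample_paths - actual_paths).empty? &&
                          (actual_paths - sample_paths).empty?

      # Array difference removes matching values, leaving only volatile ones.
      (sample.values - actual.values).length == expected_volatile &&
        (actual.values - sample.values).length == expected_volatile
    end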