Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
3.7.20
3.7.21
4 changes: 2 additions & 2 deletions lib/gooddata/helpers/global_helpers_params.rb
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ def stringify_values(value)

def resolve_reference_params(data_params, params)
reference_values = []
regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{(\w+)\}/)
regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{([\w\s\.]+)\}/)
resolve_reference = lambda do |v|
if v.is_a? Hash
Hash[
Expand All @@ -262,7 +262,7 @@ def resolve_reference_params(data_params, params)
data_params.is_a?(Hash) ? '\\' : '\\\\' # rubocop: disable Metrics/BlockNesting
elsif match =~ /\\\$/
'$'
elsif match =~ /\$\{(\w+)\}/
elsif match =~ /\$\{([\w\s\.]+)\}/
val = params["#{$1}"]
if val
reference_values << val
Expand Down
116 changes: 116 additions & 0 deletions lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
# frozen_string_literal: true
# (C) 2019-2020 GoodData Corporation
require_relative 'base_action'

# Migrate date dimension urn:gooddata:date or urn:custom:date to urn:custom_v2:date
module GoodData
  module LCM2
    # Migrates old GoodData date dimensions (urn:gooddata:date, urn:custom:date)
    # in client projects to urn:custom_v2:date so that they match the latest
    # master blueprint.
    class MigrateGdcDateDimension < BaseAction
      DESCRIPTION = 'Migrate Gdc Date Dimension'
      DATE_DIMENSION_CUSTOM_V2 = 'urn:custom_v2:date'
      DATE_DIMENSION_OLD = %w[urn:gooddata:date urn:custom:date].freeze

      PARAMS = define_params(self) do
        description 'Client Used for Connecting to GD'
        param :gdc_gd_client, instance_of(Type::GdClientType), required: true

        description 'Specifies how to synchronize LDM and resolve possible conflicts'
        param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'

        description 'Synchronization Info'
        param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
      end

      RESULT_HEADER = %i[from to status]

      class << self
        # Entry point. Migrates date dimensions for every synchronized segment
        # and returns the collected per-project results.
        def call(params)
          results = []
          # `each` instead of `map`: the loop is executed for its side effect
          # of filling `results`; the mapped array was never used.
          params.synchronize.each do |segment_info|
            results.concat(migrate_date_dimension(params, segment_info))
          end

          {
            results: results
          }
        end

        # Upgrades old date dimensions in all 'to' projects of one segment.
        #
        # @param params [Hash] LCM parameters (gdc_gd_client, synchronize_ldm, ...)
        # @param segment_info [Hash] segment data including :from_blueprint,
        #   :previous_master and the list of target projects under :to
        # @return [Array<Hash>] rows of { from:, to:, status: }
        def migrate_date_dimension(params, segment_info)
          results = []
          client = params.gdc_gd_client
          latest_blueprint = segment_info[:from_blueprint]
          # Don't migrate when the latest master doesn't contain a custom v2 date.
          return results unless contain_v2?(latest_blueprint)

          previous_blueprint = segment_info[:previous_master]&.blueprint
          # When diffing strictly against the master, first check whether any
          # date dataset changed between the previous and the latest master.
          master_upgrade_datasets = get_upgrade_dates(latest_blueprint, previous_blueprint) if params[:synchronize_ldm].downcase == 'diff_against_master' && previous_blueprint
          unless master_upgrade_datasets&.empty?
            segment_info[:to].pmap do |entry|
              pid = entry[:pid]
              to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
              to_blueprint = to_project.blueprint
              upgrade_datasets = get_upgrade_dates(latest_blueprint, to_blueprint)
              next if upgrade_datasets.empty?

              message = get_upgrade_message(upgrade_datasets)

              results << {
                from: segment_info[:from],
                to: pid,
                status: to_project.upgrade_custom_v2(message)
              }
            end
          end

          results
        end

        # Identifiers of source date datasets whose destination counterpart
        # still uses an old urn and therefore needs the upgrade.
        #
        # Always returns an Array. (Fix: this used to return +false+ when
        # either blueprint had no date dimensions, which raised NoMethodError
        # in both callers invoking Array#empty? on the result; it also left
        # the locals nil — and crashed on nil.empty? — when a blueprint was
        # nil.)
        def get_upgrade_dates(src_blueprint, dest_blueprint)
          dest_dates = dest_blueprint ? get_date_dimensions(dest_blueprint) : []
          src_dates = src_blueprint ? get_date_dimensions(src_blueprint) : []

          return [] if dest_dates.empty? || src_dates.empty?

          upgrade_datasets = []
          dest_dates.each do |dest|
            src_dim = get_date_dimension(src_blueprint, dest[:id])
            next unless src_dim

            upgrade_datasets << src_dim[:identifier] if upgrade?(src_dim, dest) && src_dim[:identifier]
          end

          upgrade_datasets
        end

        # Payload for the date dimension upgrade REST call.
        def get_upgrade_message(upgrade_datasets)
          {
            upgrade: {
              dateDatasets: {
                upgrade: "exact",
                datasets: upgrade_datasets
              }
            }
          }
        end

        # True when the source dimension already uses custom v2 while the
        # destination still uses one of the old urns.
        def upgrade?(src_dim, dest_dim)
          src_dim[:urn] == DATE_DIMENSION_CUSTOM_V2 && DATE_DIMENSION_OLD.any? { |e| dest_dim[:urn] == e }
        end

        # True when the blueprint contains at least one custom v2 date dimension.
        def contain_v2?(blueprint)
          get_date_dimensions(blueprint).any? { |e| e[:urn] == DATE_DIMENSION_CUSTOM_V2 }
        end

        def get_date_dimension(blueprint, id)
          GoodData::Model::ProjectBlueprint.find_date_dimension(blueprint, id)
        end

        def get_date_dimensions(blueprint)
          GoodData::Model::ProjectBlueprint.date_dimensions(blueprint)
        end
      end
    end
  end
end
4 changes: 3 additions & 1 deletion lib/gooddata/lcm/actions/synchronize_ldm.rb
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ class SynchronizeLdm < BaseAction
param :include_deprecated, instance_of(Type::BooleanType), required: false, default: false
end

RESULT_HEADER = %i[from to status]

class << self
def call(params)
results = []
Expand Down Expand Up @@ -76,9 +78,9 @@ def sync_segment_ldm(params, segment_info)
include_deprecated = params.include_deprecated.to_b
from_pid = segment_info[:from]
from = params.development_client.projects(from_pid) || fail("Invalid 'from' project specified - '#{from_pid}'")

GoodData.logger.info "Creating Blueprint, project: '#{from.title}', PID: #{from_pid}"
blueprint = from.blueprint(include_ca: params.include_computed_attributes.to_b)
segment_info[:from_blueprint] = blueprint
maql_diff = nil
previous_master = segment_info[:previous_master]
diff_against_master = %w(diff_against_master_with_fallback diff_against_master)
Expand Down
1 change: 1 addition & 0 deletions lib/gooddata/lcm/lcm2.rb
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ def respond_to_missing?(name, *_args)
EnsureTechnicalUsersDomain,
EnsureTechnicalUsersProject,
SynchronizeLdm,
MigrateGdcDateDimension,
SynchronizeClients,
SynchronizeComputedAttributes,
CollectDymanicScheduleParams,
Expand Down
1 change: 1 addition & 0 deletions lib/gooddata/models/from_wire.rb
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ def self.parse_date_dimensions(date_dim)
d[:title] = date_dim['dateDimension']['title']
d[:urn] = date_dim['dateDimension']['urn']
d[:identifier_prefix] = date_dim['dateDimension']['identifierPrefix']
d[:identifier] = date_dim['dateDimension']['identifier'] if date_dim['dateDimension']['identifier']
d[:columns] = parse_bridges(date_dim)
end
end
Expand Down
14 changes: 11 additions & 3 deletions lib/gooddata/models/process.rb
Original file line number Diff line number Diff line change
Expand Up @@ -118,11 +118,13 @@ def deploy_simple_process(path, options = { client: GoodData.client, project: Go
GoodData.logger.info("Deploying #{path}") if verbose

deployed_path = Process.upload_package(path, files_to_exclude, client: client, project: project)
data_sources = options[:data_sources] || []
data = {
:process => {
:name => deploy_name,
:path => "/uploads/#{File.basename(deployed_path)}",
:type => type
:type => type,
:dataSources => data_sources
}
}

Expand Down Expand Up @@ -171,10 +173,12 @@ def deploy_from_appstore(path, options = { :client => GoodData.client, :project
verbose = options[:verbose] || false
GoodData.logger.info("Deploying #{path}") if verbose

data_sources = options[:data_sources] || []
data = {
process: {
name: deploy_name,
path: path,
dataSources: data_sources,
type: 'RUBY'
}
}
Expand All @@ -185,7 +189,7 @@ def deploy_from_appstore(path, options = { :client => GoodData.client, :project
def deploy_component(data, options = { client: GoodData.client, project: GoodData.project })
client, project = GoodData.get_client_and_project(options)
data = { process: data } unless data[:process]
data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component].include? k }
data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component dataSources].include? k }
data[:process][:component] = GoodData::Helpers.symbolize_keys(data[:process][:component]).select { |k| %i[name version configLocation config].include? k }

save(data, options)
Expand Down Expand Up @@ -266,7 +270,7 @@ def delete
# @option options [String] :name Readable name of the process
# @option options [Boolean] :verbose (false) Switch on verbose mode for detailed logging
def deploy(path, options = {})
Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type }.merge(options))
Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type, :data_sources => data_sources }.merge(options))
end

# Downloads the process from S3 in a zipped form.
Expand Down Expand Up @@ -326,6 +330,10 @@ def component
process['component']
end

# Data sources attached to the process, read from the 'dataSources' key of
# the raw process resource.
# @return [Array, nil] the 'dataSources' value, or nil when the key is absent
def data_sources
process['dataSources']
end

# Determines whether the process is an ADDv2 component.
# @return [Bool] True if the process is an ADDv2 component.
def add_v2_component?
Expand Down
23 changes: 19 additions & 4 deletions lib/gooddata/models/project.rb
Original file line number Diff line number Diff line change
Expand Up @@ -271,11 +271,11 @@ def transfer_processes(from_project, to_project, options = {})

to_process = if process.path
to_process.delete if to_process
GoodData::Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project)
Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: process.data_sources)
elsif process.component
to_process.delete if to_process
process_hash = GoodData::Helpers::DeepMergeableHash[GoodData::Helpers.symbolize_keys(process.to_hash)].deep_merge(additional_hidden_params)
GoodData::Process.deploy_component(process_hash, project: to_project, client: to_project.client)
Process.deploy_component(process_hash, project: to_project, client: to_project.client)
else
Dir.mktmpdir('etl_transfer') do |dir|
dir = Pathname(dir)
Expand All @@ -285,9 +285,9 @@ def transfer_processes(from_project, to_project, options = {})
end

if to_process
to_process.deploy(filename, type: process.type, name: process.name)
to_process.deploy(filename, type: process.type, name: process.name, data_sources: process.data_sources)
else
to_project.deploy_process(filename, type: process.type, name: process.name)
to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: process.data_sources)
end
end
end
Expand Down Expand Up @@ -625,6 +625,7 @@ def attributes_by_title(title)
def blueprint(options = {})
options = { include_ca: true }.merge(options)
result = client.get("/gdc/projects/#{pid}/model/view", params: { includeDeprecated: true, includeGrain: true, includeCA: options[:include_ca] })

polling_url = result['asyncTask']['link']['poll']
model = client.poll_on_code(polling_url, options)
bp = GoodData::Model::FromWire.from_wire(model, options)
Expand Down Expand Up @@ -1922,6 +1923,20 @@ def resolve_roles(login, desired_roles, options = {})
[user, roles]
end

# Triggers the platform task that upgrades this project's date dimensions to
# urn:custom_v2:date and waits for it to finish.
#
# @param message [Hash] upgrade payload (dateDatasets/datasets structure)
# @param options [Hash] forwarded to the response-polling call
# @return [String] the task status string for a synchronous response,
#   otherwise 'OK' when the polled task ended with status 'OK', 'FAIL' otherwise
# NOTE(review): `client&.post` returns nil when `client` is nil, and the very
# next line would then raise NoMethodError — the safe navigation here is
# inconsistent; confirm `client` is always present for a Project instance.
def upgrade_custom_v2(message, options = {})
uri = "/gdc/md/#{pid}/datedimension/upgrade"
poll_result = client&.post(uri, message)

# Synchronous response: the task status is available immediately.
return poll_result['wTaskStatus']['status'] if poll_result['wTaskStatus'] && poll_result['wTaskStatus']['status']

# Asynchronous response: poll until the task leaves the RUNNING state.
polling_uri = poll_result['asyncTask']['link']['poll']
result = client&.poll_on_response(polling_uri, options) do |body|
body && body['wTaskStatus'] && body['wTaskStatus']['status'] == 'RUNNING'
end

result['wTaskStatus']['status'] == 'OK' ? 'OK' : 'FAIL'
end

def add
@add ||= GoodData::AutomatedDataDistribution.new(self)
@add
Expand Down
74 changes: 74 additions & 0 deletions spec/data/blueprints/old_date_dimension.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
{
"title": "blueprint old date dimension",
"include_ca": true,
"datasets": [
{
"type": "dataset",
"title": "person",
"id": "dataset.person",
"columns": [
{
"type": "anchor",
"id": "attr.person.factsof",
"title": "Records of person",
"description": "Records of person",
"folder": "person"
},
{
"type": "attribute",
"id": "attr.person.name",
"title": "name",
"description": "name",
"folder": "person"
},
{
"type": "label",
"id": "label.person.name",
"reference": "attr.person.name",
"title": "name",
"gd_data_type": "VARCHAR(128)",
"gd_type": "GDC.text",
"default_label": true
},
{
"type": "fact",
"id": "fact.person.id",
"title": "id",
"description": "id",
"folder": "person",
"gd_data_type": "DECIMAL(12,2)"
},
{
"type": "date",
"dataset": "datecustom"
},
{
"type": "date",
"dataset": "dategooddata"
}
]
}
],
"date_dimensions": [
{
"type": "date_dimension",
"id": "datecustom",
"title": "DateCustom",
"urn": "urn:custom:date",
"identifier_prefix": "datecustom",
"columns": [

]
},
{
"type": "date_dimension",
"id": "dategooddata",
"title": "DateGooddata",
"urn": "urn:gooddata:date",
"identifier_prefix": "dategooddata",
"columns": [

]
}
]
}
Loading