diff --git a/.gitignore b/.gitignore index 31286c8..c9c8b10 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,7 @@ config/*.yml !config/servername_environment_import_config.yml !config/servername_environment_export_config.yml !config/servername_environment_export_specific_config.yml +exports/* +Local_Gems/* +Tools/* +GrabNGoBundle/* \ No newline at end of file diff --git a/Gemfile b/Gemfile index 6734f78..eb459a6 100644 --- a/Gemfile +++ b/Gemfile @@ -1,3 +1,9 @@ source 'https://rubygems.org' -gem 'kinetic_sdk', '5.0.22' +gem 'kinetic_sdk', '5.0.26' +gem 'logger', '1.4.2' +gem 'json', '2.3.0' +gem 'optparse', '0.6.0' +gem 'rexml', '3.2.3.1' +gem 'io-console', '0.5.6' +gem 'base64', '0.2.0' \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 564e43b..1f13b30 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,18 +1,14 @@ GEM remote: https://rubygems.org/ specs: - kinetic_sdk (5.0.19) + kinetic_sdk (5.0.26) mime-types (>= 3.3.1) multipart-post (= 2.0.0) - parallel (= 1.12.1) - ruby-progressbar (= 1.9.0) slugify (= 1.0.7) - mime-types (3.3.1) + mime-types (3.5.2) mime-types-data (~> 3.2015) - mime-types-data (3.2021.0704) + mime-types-data (3.2024.0305) multipart-post (2.0.0) - parallel (1.12.1) - ruby-progressbar (1.9.0) slugify (1.0.7) PLATFORMS @@ -21,7 +17,7 @@ PLATFORMS x64-mingw32 DEPENDENCIES - kinetic_sdk (= 5.0.19) + kinetic_sdk (= 5.0.26) BUNDLED WITH 2.2.5 diff --git a/config/servername_environment_import_config.yml b/config/servername_environment_import_config.yml index 056d78d..0db6f6f 100644 --- a/config/servername_environment_import_config.yml +++ b/config/servername_environment_import_config.yml @@ -4,6 +4,7 @@ core: server_url: https://web-server.com space_slug: space_name: + old_space_slug: service_user_username: service_user_password: options: diff --git a/export.rb b/export.rb index 285cc78..d87ea72 100644 --- a/export.rb +++ b/export.rb @@ -1,3 +1,6 @@ +#TODO work +#Bluestone 
exported a workflow as "inactive" but it imported as "active" + # RUNNING THE SCRIPT: # ruby export.rb -c "<>" # ruby export.rb -c "config/foo-web-server.rb" @@ -19,6 +22,9 @@ SUBMISSIONS_TO_EXPORT: - datastore: true formSlug: + - datastore: false + kappSlug: + formSlug: REMOVE_DATA_PROPERTIES: - createdAt @@ -44,31 +50,34 @@ =end -require 'logger' +require 'logger' #For System Logging require 'json' -require 'optparse' -require 'kinetic_sdk' +require 'optparse' #For argument parsing +require 'kinetic_sdk' # Note you may need to run "Gem install Kinetic_sdk" +require 'Find' #For config list building +require 'io/console' #For password request +require 'base64' #For pwd encoding template_name = "platform-template" +$pwdFields = ["core","task"] -logger = Logger.new(STDERR) -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| +$logger = Logger.new(STDERR) +$logger.level = Logger::INFO +$logger.formatter = proc do |severity, datetime, progname, msg| date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") "[#{date_format}] #{severity}: #{msg}\n" end + # Determine the Present Working Directory pwd = File.expand_path(File.dirname(__FILE__)) -ARGV << '-h' if ARGV.empty? # The options specified on the command line will be collected in *options*. options = {} OptionParser.new do |opts| opts.banner = "Usage: example.rb [options]" - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| options["CONFIG_FILE"] = config end @@ -81,10 +90,79 @@ end end.parse! + +#Configuration Selection +def config_selection(config_folder_path) + + #Ensure config folder exists + if !File.directory?(config_folder_path) + $logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." 
+ gets + exit + end + + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + $logger.info "Checking #{config_folder_path} for config files" + #Check config folder for yaml/yml files containing the word 'export' + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) + end + rescue => error + #No config files found in config folder + $logger.error "Error finding default config file path!" + $logger.error "Error reported: #{error}" + puts "Cannot find config files in default path! (#{config_folder_path})" + puts "Exiting script..." + gets + exit + end + $logger.info "Found config files" + + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + $logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + $logger.info "Option #{sel} - #{configFile}" + break + rescue + $logger.info "Error selecting config file! Exiting..." + puts "Error selecting config file!" + puts "Exiting..." + gets + exit + end + end while true + return configFile +end + + +#End method + # determine the directory paths platform_template_path = File.dirname(File.expand_path(__FILE__)) -core_path = File.join(platform_template_path, "core") -task_path = File.join(platform_template_path, "task") +config_folder_path = File.join(platform_template_path,'config') + +if options["CONFIG_FILE"].nil? 
+ options["CONFIG_FILE"] = config_selection(config_folder_path) +end # ------------------------------------------------------------------------------ # methods @@ -112,20 +190,24 @@ def remove_discussion_id_attribute(model) # setup # ------------------------------------------------------------------------------ -logger.info "Installing gems for the \"#{template_name}\" template." +$logger.info "Installing gems for the \"#{template_name}\" template." Dir.chdir(platform_template_path) { system("bundle", "install") } vars = {} file = "#{platform_template_path}/#{options['CONFIG_FILE']}" # Check if configuration file exists -logger.info "Validating configuration file." +$logger.info "Validating configuration file." begin if File.exist?(file) != true - raise "The file \"#{options['CONFIG_FILE']}\" does not exist." + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end end rescue => error - logger.info error + $logger.info error + $logger.info "Exiting..." exit end @@ -133,11 +215,73 @@ def remove_discussion_id_attribute(model) begin vars.merge!( YAML.load(File.read(file)) ) rescue => error - logger.info "Error loading YAML configuration" - logger.info error + $logger.info "Error loading YAML configuration" + $logger.info error + $logger.info "Exiting..." + gets + exit +end +$logger.info "Configuration file passed validation." + + +#Check if nil/unencoded and update accordingly +def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if vars[pwdAttribute]["service_user_password"].nil? 
+ password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password = vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + begin + fileObj = File.open(file, 'w') + puts "Updated pwd in #{pwdAttribute} to #{enc}" + fileObj.write vars.to_yaml + #{ |f| f.write vars.to_yaml } + rescue ArgumentError + $logger.error("There was an error while updating variables file:") + $logger.error(ArgumentError) + ensure + fileObj.close + end + #TODO - If you cannot properly write an encoded pwd, exit +end + +#Decode password to utilize +def DecodePWD(file, vars, pwdLoc) + pwdAttribute = vars[pwdLoc]["service_user_password"] + return Base64.decode64(pwdAttribute) +end + +#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions +def ValidatePWD(file, vars) + $pwdFields.each do |field| + t = vars[field]["service_user_password"] + #See if not a string, not encoded, or default + if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" + puts "Updating password #{t}" + SecurePWD(file, vars, field) + end + end +end + +ValidatePWD(file, vars) +vars["core"]["service_user_password"] = DecodePWD(file, vars, "core") +vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") + +if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? + puts "Core password is blank! Password required. Exiting..." + gets + exit +end +if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? + puts "Task password is blank! Password required. Exiting..." + gets exit end -logger.info "Configuration file passed validation." + + # Set http_options based on values provided in the config file. 
http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| @@ -148,10 +292,38 @@ def remove_discussion_id_attribute(model) SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] +#Config exports folder exists, if not then create +if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) +end + +#Setting core paths utilzing variables +if !vars['core']['space_slug'].nil? + folderName = vars['core']['space_slug'] +elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] +else + puts "No space slug or name provided! Please provide one in order to export..." + gets + exit +end +core_path = File.join(platform_template_path, "exports", folderName, "core") +task_path = File.join(platform_template_path, "exports", folderName, "task") + + +#Confirmation of space +puts "Exporting #{vars['core']['space_name']} to #{folderName} in 5 seconds..." 
+4.downto(1) do |n| + puts n + sleep(1) +end + + + # Output the yml file config -logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" +$logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" -logger.info "Setting up the SDK" +$logger.info "Setting up the SDK" space_sdk = KineticSdk::Core.new({ space_server_url: vars["core"]["server_url"], @@ -161,7 +333,7 @@ def remove_discussion_id_attribute(model) options: http_options.merge({ export_directory: "#{core_path}" }) }) -task_sdk = KineticSdk::Task.new({ +$task_sdk = KineticSdk::Task.new({ app_server_url: "#{vars["task"]["server_url"]}", username: vars["task"]["service_user_username"], password: vars["task"]["service_user_password"], @@ -174,48 +346,49 @@ def remove_discussion_id_attribute(model) # Validate Core Connection begin - logger.info "Validating connection to Core \"#{space_sdk.api_url}\"" - response = space_sdk.me() + $logger.info "Validating connection to Core \"#{$space_sdk.api_url}\"" + response = $space_sdk.me() if response.status == 0 raise response.message elsif response.status.to_s.match(/4\d{2}/) raise response.content['error'] end rescue => error - logger.info error + $logger.info error exit end # Validate Task Connection begin - logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" - response = task_sdk.environment() + $logger.info "Validating connection to Task \"#{$task_sdk.api_url}\"" + response = $task_sdk.environment() if response.status == 0 raise response.message elsif response.status.to_s.match(/4\d{2}/) raise response.content['error'] end rescue => error - logger.info error + $logger.info error exit end -logger.info "Validating connection to Cors and Task was Successful" +$logger.info "Validating connection to Cors and Task was Successful" # ------------------------------------------------------------------------------ # core # ------------------------------------------------------------------------------ -logger.info "Removing 
files and folders from the existing \"#{template_name}\" template." +##Clear old folder/files +$logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{core_path}/*") -logger.info "Setting up the Core SDK" +$logger.info "Setting up the Core SDK" # fetch export from core service and write to export directory -logger.info "Exporting the core components for the \"#{template_name}\" template." -logger.info " exporting with api: #{space_sdk.api_url}" -logger.info " - exporting configuration data (Kapps,forms, etc)" -space_sdk.export_space +$logger.info "Exporting the core components for the \"#{template_name}\" template." +$logger.info " exporting with api: #{$space_sdk.api_url}" +$logger.info " - exporting configuration data (Kapps,forms, etc)" +$space_sdk.export_space # cleanup properties that should not be committed with export # bridge keys @@ -252,11 +425,12 @@ def remove_discussion_id_attribute(model) File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(model)) } end +#TODO - Flag for submissions to export # export submissions -logger.info "Exporting and writing submission data" +$logger.info "Exporting and writing submission data" (SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| is_datastore = item["datastore"] || false - logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" + $logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" # build directory to write files to submission_path = is_datastore ? "#{core_path}/space/datastore/forms/#{item['formSlug']}" : @@ -264,95 +438,134 @@ def remove_discussion_id_attribute(model) # get attachment fields from form definition attachment_form = is_datastore ? 
- space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : - space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) + $space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : + $space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) # get attachment fields from form definition attachement_files = attachment_form.status == 200 ? attachment_form.content['form']['fields'].select{ | file | file['dataType'] == "file" }.map { | field | field['name'] } : {} # set base url for attachments attachment_base_url = is_datastore ? - "#{space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : - "#{space_sdk.api_url.gsub("/app/api/v1", "")}" + "#{$space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : + "#{$space_sdk.api_url.gsub("/app/api/v1", "")}" # create folder to write submission data to FileUtils.mkdir_p(submission_path, :mode => 0700) - - # build params to pass to the retrieve_form_submissions method - params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} + # open the submissions file in write mode file = File.open("#{submission_path}/submissions.ndjson", 'w'); - # ensure the file is empty file.truncate(0) - response = nil - begin - # get submissions from datastore form or form - response = is_datastore ? 
- space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : - space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content - if response.has_key?("submissions") - # iterate over each submission - (response["submissions"] || []).each do |submission| - # write each attachment to a a dir - submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| - submission_id = submission['id'] - # define the dir to contain the attahment - download_dir = "#{submission_path}/#{submission_id}/#{field}" - # evaluate fields with multiple attachments - value.map.with_index{ | attachment, index | - # create folder to write attachment - FileUtils.mkdir_p(download_dir, :mode => 0700) - # dir and file name to write attachment - download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" - # url to retrieve the attachment - url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" - # retrieve and write attachment - space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) - # add the "path" key to indicate the attachment's location - attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" + file.close() + file = File.open("#{submission_path}/submissions.ndjson", 'a'); + processed_submissions = false + createdAt = Time.now + previous = nil + # dataBlock = {} + # Iterate submissions in case over 1000 exist + while !processed_submissions && !createdAt.nil? do + # build params to pass to the retrieve_form_submissions method + params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} + if !createdAt.nil? + params["q"] = "createdAt>=\"#{createdAt}\"" + end + + response = nil + begin + # get submissions from datastore form or form + response = is_datastore ? 
+ $space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : + $space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content + if response.has_key?("submissions") + # File.write("outputtest.txt","#{response}") + # exit + # iterate over each submission + (response["submissions"] || []).each do |submission| + # write each attachment to a a dir + submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| + submission_id = submission['id'] + # define the dir to contain the attahment + download_dir = "#{submission_path}/#{submission_id}/#{field}" + # evaluate fields with multiple attachments + value.map.with_index{ | attachment, index | + # create folder to write attachment + FileUtils.mkdir_p(download_dir, :mode => 0700) + # dir and file name to write attachment + download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" + # url to retrieve the attachment + url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" + # retrieve and write attachment + $space_sdk.stream_download_to_file(download_path, url, {}, $space_sdk.default_headers) + # add the "path" key to indicate the attachment's location + attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" + } } - } - # append each submission (removing the submission unwanted attributes) - file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + # append each submission (removing the submission unwanted attributes) + # dataBlock = dataBlock.merge(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + json_string = JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)}) + unless json_string == previous + file.puts(json_string) + previous = json_string + end + # file.puts(JSON.generate(submission.delete_if { 
|key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + end end + params['pageToken'] = response['nextPageToken'] + # get next page of submissions if there are more + end while !response.nil? && !response['nextPageToken'].nil? + # close the submissions file + # file.close() + # $logger.info "Subs" + + if response["submissions"].count == 1000 + #Check if another batch exists + createdAt = (response["submissions"].last)["createdAt"] + # $logger.info "LastSub: #{response["submissions"].last}" + $logger.debug "New created at #{createdAt}" + else + #If not, exit loop + $logger.debug "Exiting submission loop" + processed_submissions = true + createdAt = nil end - params['pageToken'] = response['nextPageToken'] - # get next page of submissions if there are more - end while !response.nil? && !response['nextPageToken'].nil? - # close the submissions file + + end file.close() + #Write to file + # file.puts(dataBlock) end -logger.info " - submission data export complete" +$logger.info " - submission data export complete" # ------------------------------------------------------------------------------ # task # ------------------------------------------------------------------------------ -logger.info "Removing files and folders from the existing \"#{template_name}\" template." +$logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{task_path}/*") -logger.info "Exporting the task components for the \"#{template_name}\" template." -logger.info " exporting with api: #{task_sdk.api_url}" +$logger.info "Exporting the task components for the \"#{template_name}\" template." 
+$logger.info " exporting with api: #{$task_sdk.api_url}" # export all sources, trees, routines, handlers, # groups, policy rules, categories, and access keys -task_sdk.export_sources() -task_sdk.find_sources().content['sourceRoots'].each do |source| - task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| - task_sdk.export_tree(tree['title']) +$task_sdk.export_sources() +$task_sdk.find_sources().content['sourceRoots'].each do |source| + $task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| + $task_sdk.export_tree(tree['title']) end end -task_sdk.export_routines() -task_sdk.export_handlers() -task_sdk.export_groups() -task_sdk.export_policy_rules() -task_sdk.export_categories() -task_sdk.export_access_keys() +#Is above tied to below? +#TODO - Add flags/logic to have ability to be selective on what's exported/imported +$task_sdk.export_routines() +$task_sdk.export_handlers() +$task_sdk.export_groups() +$task_sdk.export_policy_rules() +$task_sdk.export_categories() +$task_sdk.export_access_keys() # ------------------------------------------------------------------------------ # complete # ------------------------------------------------------------------------------ -logger.info "Finished exporting the \"#{template_name}\" template." +$logger.info "Finished exporting the \"#{template_name}\" template." \ No newline at end of file diff --git a/import.rb b/import.rb index 8a09c6c..da4527d 100644 --- a/import.rb +++ b/import.rb @@ -8,6 +8,7 @@ # Teams are not deleted from destination. It could be too dangerous to delete them. 
# TODO +#Have better validation/notification if you cannot connect (Certificate issue) # RUNNING THE SCRIPT: # ruby import_script.rb -c "<>" @@ -35,869 +36,1246 @@ log_level: info log_output: stderr =end - -require 'logger' +require 'logger' #For System Logging require 'json' require 'rexml/document' -require 'optparse' +require 'optparse' #For argument parsing +# require 'kinetic_sdk' +require 'Find' #For config list building +require 'io/console' #For password request +require 'base64' #For pwd encoding +require 'concurrent-ruby' + +$LOAD_PATH.unshift('C:\Users\travis.wiese\Source\repos\kinetic-sdk-rb\lib') require 'kinetic_sdk' -include REXML - -template_name = "platform-template" - -logger = Logger.new(STDERR) -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| - date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") - "[#{date_format}] #{severity}: #{msg}\n" -end -######################################### -# Determine the Present Working Directory -pwd = File.expand_path(File.dirname(__FILE__)) -ARGV << '-h' if ARGV.empty? +def import_space() + template_name = "platform-template" + $pwdFields = ["core","task"] -# The options specified on the command line will be collected in *options*. -options = {} -OptionParser.new do |opts| - opts.banner = "Usage: example.rb [options]" - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| - options["CONFIG_FILE"] = config + $logger = Logger.new(STDERR) + $logger.level = Logger::INFO + $logger.formatter = proc do |severity, datetime, progname, msg| + date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") + "[#{date_format}] #{severity}: #{msg}\n" end - - # No argument, shows at tail. This will print an options summary. - # Try it and see! - opts.on_tail("-h", "--help", "Show this message") do - puts opts - exit - end -end.parse! 
-#Now raise an exception if we have not found a CONFIG_FILE option -raise OptionParser::MissingArgument if options["CONFIG_FILE"].nil? + ######################################### + # Determine the Present Working Directory + pwd = File.expand_path(File.dirname(__FILE__)) + # ARGV << '-h' if ARGV.empty? -# determine the directory paths -platform_template_path = File.dirname(File.expand_path(__FILE__)) -core_path = File.join(platform_template_path, "core") -task_path = File.join(platform_template_path, "task") + # The options specified on the command line will be collected in *options*. + options = {} + OptionParser.new do |opts| + opts.banner = "Usage: example.rb [options]" + opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| + options["CONFIG_FILE"] = config + end + + # No argument, shows at tail. This will print an options summary. + # Try it and see! + opts.on_tail("-h", "--help", "Show this message") do + puts opts + exit + end + end.parse! -# ------------------------------------------------------------------------------ -# methods -# ------------------------------------------------------------------------------ + max_threads = 10 + $pool = Concurrent::FixedThreadPool.new(max_threads) + $mutex = Mutex.new + kapps_array = [] + kpromises = [] -# ------------------------------------------------------------------------------ -# constants -# ------------------------------------------------------------------------------ + #End method -# ------------------------------------------------------------------------------ -# setup -# ------------------------------------------------------------------------------ + # determine the directory paths + platform_template_path = File.dirname(File.expand_path(__FILE__)) + config_folder_path = File.join(platform_template_path,'config') -logger.info "Installing gems for the \"#{template_name}\" template." -Dir.chdir(platform_template_path) { system("bundle", "install") } + if options["CONFIG_FILE"].nil? 
+ options["CONFIG_FILE"] = config_selection(config_folder_path) + end + $logger.info "Installing gems for the \"#{template_name}\" template." + Dir.chdir(platform_template_path) { system("bundle", "install") } + vars = {} + file = "#{platform_template_path}/#{options['CONFIG_FILE']}" -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ -vars = {} -# Read the config file specified in the command line into the variable "vars" -if File.file?(file = "#{platform_template_path}/#{options['CONFIG_FILE']}") - vars.merge!( YAML.load(File.read("#{platform_template_path}/#{options['CONFIG_FILE']}")) ) -elsif - raise "Config file not found: #{file}" -end + # Check if configuration file exists + $logger.info "Validating configuration file." + begin + if File.exist?(file) != true + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end + end + rescue => error + $logger.info error + $logger.info "Exiting..." + exit + end -# Set http_options based on values provided in the config file. -http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| - result[k.to_sym] = v -end + # Read the config file specified in the command line into the variable "" + begin + vars.merge!( YAML.load(File.read(file)) ) + rescue => error + $logger.info "Error loading YAML configuration" + $logger.info error + $logger.info "Exiting..." + gets + exit + end + $logger.info "Configuration file passed validation." -# Set option values to default values if not included -vars["options"] = !vars["options"].nil? ? vars["options"] : {} -vars["options"]["delete"] = !vars["options"]["delete"].nil? ? 
vars["options"]["delete"] : false -logger.info "Importing using the config: #{JSON.pretty_generate(vars)}" + ValidatePWD(file, vars) + #Will confirm there is a valid, encoded password and decode. Otherwise it will prompt/encode pwd and return decoded variant + vars["core"]["service_user_password"] = DecodePWD(file, vars,"core") + vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") -space_sdk = KineticSdk::Core.new({ - space_server_url: vars["core"]["server_url"], - space_slug: vars["core"]["space_slug"], - username: vars["core"]["service_user_username"], - password: vars["core"]["service_user_password"], - options: http_options.merge({ export_directory: "#{core_path}" }) -}) -puts "Are you sure you want to perform an import of data to #{vars["core"]["server_url"]}? [Y/N]" -STDOUT.flush -case (gets.downcase.chomp) -when 'y' - puts "Continuing Import" - STDOUT.flush -else - abort "Exiting Import" -end + if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? + puts "Core password is blank! Password required. Exiting..." + gets + exit + end + if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? + puts "Task password is blank! Password required. Exiting..." + gets + exit + end -################################################################### -# ------------------------------------------------------------------------------ -# Update Space Attributes -# ------------------------------------------------------------------------------ -sourceSpaceAttributeArray = [] -destinationSpaceAttributeArray = (space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} -if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") - spaceAttributeDefinitions = JSON.parse(File.read(file)) + # Set http_options based on values provided in the config file. 
+ http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| + result[k.to_sym] = v + end - spaceAttributeDefinitions.each { |attribute| - if destinationSpaceAttributeArray.include?(attribute['name']) - space_sdk.update_space_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceSpaceAttributeArray.push(attribute['name']) - } -end - -destinationSpaceAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) - space_sdk.delete_space_attribute_definition(attribute) - end -} - -# ------------------------------------------------------------------------------ -# Update User Attributes -# ------------------------------------------------------------------------------ -sourceUserAttributeArray = [] -destinationUserAttributeArray = (space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") - userAttributeDefinitions = JSON.parse(File.read(file)) - userAttributeDefinitions.each { |attribute| - if destinationUserAttributeArray.include?(attribute['name']) - space_sdk.update_user_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserAttributeArray.push(attribute['name']) - } -end + #Config exports folder exists, if not then create + if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) + end -destinationUserAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) - space_sdk.delete_user_attribute_definition(attribute) + #Setting core paths utilzing variables - 
Check old_space_slug -> space_slug -> space_name + if !vars['core']['old_space_slug'].nil? + folderName = vars['core']['old_space_slug'] + elsif !vars['core']['space_slug'].nil? + folderName = vars['core']['space_slug'] + elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] + else + puts "No space slug or name provided! Please provide one in order to export..." + gets + exit end -} -# ------------------------------------------------------------------------------ -# Update User Profile Attributes -# ------------------------------------------------------------------------------ + core_path = File.join(platform_template_path, "exports", folderName, "core") + task_path = File.join(platform_template_path, "exports", folderName, "task") -sourceUserProfileAttributeArray = [] -destinationUserProfileAttributeArray = (space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} + # Output the yml file config + $logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" -if File.file?(file = "#{core_path}/space/userProfileAttributeDefinitions.json") - userProfileAttributeDefinitions = JSON.parse(File.read(file)) + $logger.info "Setting up the SDK" - userProfileAttributeDefinitions.each { |attribute| - if destinationUserProfileAttributeArray.include?(attribute['name']) - space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserProfileAttributeArray.push(attribute['name']) - } -end + $space_sdk = KineticSdk::Core.new({ + space_server_url: vars["core"]["server_url"], + space_slug: vars["core"]["space_slug"], + username: vars["core"]["service_user_username"], + password: vars["core"]["service_user_password"], + options: http_options.merge({ export_directory: "#{core_path}" }) + }) 
-destinationUserProfileAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) - space_sdk.delete_user_profile_attribute_definition(attribute) + puts "Are you sure you want to perform an import of data from #{folderName} to #{vars["core"]["server_url"]}? [Y/N]" + STDOUT.flush + case (gets.downcase.chomp) + when 'y' + puts "Continuing Import" + STDOUT.flush + else + abort "Exiting Import" end -} + -# ------------------------------------------------------------------------------ -# Update Team Attributes -# ------------------------------------------------------------------------------ - -sourceTeamAttributeArray = [] -destinationTeamAttributeArray = (space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} + import_bridge_models(core_path,vars) -if File.file?(file = "#{core_path}/space/teamAttributeDefinitions.json") - teamAttributeDefinitions = JSON.parse(File.read(file)) - teamAttributeDefinitions.each { |attribute| - if destinationTeamAttributeArray.include?(attribute['name']) - space_sdk.update_team_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_team_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceTeamAttributeArray.push(attribute['name']) - } -end + # ------------------------------------------------------------------------------ + # delete bridge models + # Delete any Bridges from the destination which are missing from the import data + # ------------------------------------------------------------------------------ + import_space_web_apis(core_path) -destinationTeamAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) - space_sdk.delete_team_attribute_definition(attribute) - end -} + # ------------------------------------------------------------------------------ + # delete space 
teams + # TODO: A method doesn't exist for deleting the team + # ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # import kapp data + # ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# Update Datastore Attributes -# ------------------------------------------------------------------------------ + Dir["#{core_path}/space/kapps/*"].each { |file| + kpromises << Concurrent::Promise.execute(executor: $pool) do + begin + kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? File::SEPARATOR : x}.last.gsub('.json','') + next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration + kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur + kapp = {} + kapp['slug'] = kapp_slug # set kapp_slug + + if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file + kapp = JSON.parse( File.read(file) ) + kappExists = $space_sdk.find_kapp(kapp['slug']).code.to_i == 200 + if kappExists + $space_sdk.update_kapp(kapp['slug'], kapp) + else + $space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) + end + end + + + import_kapp_attribute_definitions(core_path, kapp) + import_kapp_form_attribute_definitions(core_path,kapp) + import_kapp_form_type_definitions(core_path,kapp) + + import_kapp_security_policy_definitions(core_path, kapp) + + # ------------------------------------------------------------------------------ + # Migrate Kapp Categories + # ------------------------------------------------------------------------------ + import_kapp_categories(core_path) + + + + # ------------------------------------------------------------------------------ + 
# import space webhooks + # ------------------------------------------------------------------------------ + sourceSpaceWebhooksArray = [] + destinationSpaceWebhooksArray = ($space_sdk.find_webhooks_on_space({"include"=>"details"}).content['webhooks'] || {}).map{ |webhook| {"name" => webhook['name'], "updatedAt"=>webhook['updatedAt']} } + + Dir["#{core_path}/space/webhooks/*.json"].each{ |file| + webhook = JSON.parse(File.read(file)) + destinationWebhook = destinationSpaceWebhooksArray.find {|destination_webhook| destination_webhook['name'] == webhook['name']} + if destinationSpaceWebhooksArray.include?(webhook['name']) + + $space_sdk.update_webhook_on_space(webhook['name'], webhook) + elsif + $space_sdk.add_webhook_on_space(webhook) + end + sourceSpaceWebhooksArray.push(webhook['name']) + } -sourceDatastoreAttributeArray = [] -destinationDatastoreAttributeArray =(space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} + # ------------------------------------------------------------------------------ + # delete space webhooks + # TODO: A method doesn't exist for deleting the webhook + # ------------------------------------------------------------------------------ -if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") - datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) - datastoreFormAttributeDefinitions.each { |attribute| - if destinationDatastoreAttributeArray.include?(attribute['name']) - space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_datastore_form_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + destinationSpaceWebhooksArray.each do |webhook| + if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) + $space_sdk.delete_webhook_on_space(webhook) + end + end + + # 
------------------------------------------------------------------------------ + # Migrate Kapp Webhooks + # ------------------------------------------------------------------------------ + sourceWebhookArray = [] + webhooks_on_kapp = $space_sdk.find_webhooks_on_kapp(kapp['slug']) + + if webhooks_on_kapp.code=="200" + destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| + webhookDef = JSON.parse(File.read(webhookFile)) + if destinationWebhookArray.include?(webhookDef['name']) + $space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) + else + $space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) + end + sourceWebhookArray.push(webhookDef['name']) + } + + # ------------------------------------------------------------------------------ + # Delete Kapp Webhooks + # ------------------------------------------------------------------------------ + destinationWebhookArray.each { | attribute | + if vars["options"]["delete"] && !sourceWebhookArray.include?(attribute) + $space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) + end + } + end + + + + import_forms(core_path,kapp,vars) + + + ##TODO - Convert to csv upload + ## PATCH https://playground-travis-wiese.kinopsdev.io/app/api/v1/kapps/kapp1/forms/f1/submissions?import + ## + + # ------------------------------------------------------------------------------ + # Import Kapp Form Data + # ------------------------------------------------------------------------------ + + import_kapp_form_data(core_path,kapp) + import_kapp_web_apis(core_path,kapp) + + # ------------------------------------------------------------------------------ + # Delete Kapp Web APIs + # ------------------------------------------------------------------------------ + destinationWebApisArray.each { | webApi | + if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) + 
$space_sdk.delete_kapp_webapi(kapp['slug'], webApi) + end + } + rescue end - sourceDatastoreAttributeArray.push(attribute['name']) - } -end + end -destinationDatastoreAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) - #Delete form is disabled - #space_sdk.delete_datastore_form_attribute_definition(attribute) - end -} + } + kpromises.each(&:wait!) + + #End Kapp loop -# ------------------------------------------------------------------------------ -# Update Security Policy -# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # task + # ------------------------------------------------------------------------------ -sourceSecurityPolicyArray = [] -destinationSecurityPolicyArray = (space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + $task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{task_path}" }) + }) -if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") - securityPolicyDefinitions = JSON.parse(File.read(file)) - securityPolicyDefinitions.each { |attribute| - if destinationSecurityPolicyArray.include?(attribute['name']) - space_sdk.update_space_security_policy_definition(attribute['name'], attribute) - else - space_sdk.add_space_security_policy_definition(attribute) - end - sourceSecurityPolicyArray.push(attribute['name']) - } -end + # ------------------------------------------------------------------------------ + # task import + # ------------------------------------------------------------------------------ -destinationSecurityPolicyArray.each { | attribute | - if vars["options"]["delete"] && 
!sourceSecurityPolicyArray.include?(attribute) - space_sdk.delete_space_security_policy_definition(attribute) - end -} + $logger.info "Importing the task components for the \"#{template_name}\" template." + $logger.info " importing with api: #{$task_sdk.api_url}" + # ------------------------------------------------------------------------------ + # task handlers + # ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# import bridge models -# *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. -# ------------------------------------------------------------------------------ + # import handlers forcing overwrite + $task_sdk.import_handlers_threaded(true) -destinationModels = space_sdk.find_bridge_models() -destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} + # ------------------------------------------------------------------------------ + # Import Task Trees and Routines + # ------------------------------------------------------------------------------ -Dir["#{core_path}/space/models/*.json"].each{ |model| - body = JSON.parse(File.read(model)) - if destinationModels_Array.include?(body['name']) - space_sdk.update_bridge_model(body['name'], body) - elsif - space_sdk.add_bridge_model(body) - end -} + # import routines and force overwrite + $task_sdk.import_routines_threaded(true) + # import trees and force overwrite + $task_sdk.import_trees_threaded(true) -# ------------------------------------------------------------------------------ -# delete bridge models -# Delete any Bridges from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -SourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } -destinationModels_Array.each do 
|model| - if vars["options"]["delete"] && !SourceModelsArray.include?(model) - space_sdk.delete_bridge_model(model) - end -end -# ------------------------------------------------------------------------------ -# Import Space Web APIs -# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # import task categories + # ------------------------------------------------------------------------------ -sourceSpaceWebApisArray = [] -destinationSpaceWebApisArray = (space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} + sourceCategories = [] #From import data + destinationCategories = ($task_sdk.find_categories().content['categories'] || {}).map{ |category| {'category'=>category['name'],'updatedAt'=>category['updatedAt']}} - -Dir["#{core_path}/space/webApis/*"].each{ |file| - body = JSON.parse(File.read(file)) - if destinationSpaceWebApisArray.include?(body['slug']) - space_sdk.update_space_webapi(body['slug'], body) - else - space_sdk.add_space_webapi(body) - end - sourceSpaceWebApisArray.push(body['slug']) -} - -# ------------------------------------------------------------------------------ -# Delete Space Web APIs -# Delete any Web APIs from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -destinationSpaceWebApisArray.each { | webApi | - if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) - space_sdk.delete_space_webapi(webApi) - end -} - -# ------------------------------------------------------------------------------ -# import datastore forms -# ------------------------------------------------------------------------------ -destinationDatastoreForms = [] #From destination server -sourceDatastoreForms = [] #From import data - -logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" - - 
destinationDatastoreForms = (space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} - Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| - body = JSON.parse(File.read(datastore)) - sourceDatastoreForms.push(body['slug']) - if destinationDatastoreForms.include?(body['slug']) - space_sdk.update_datastore_form(body['slug'], body) + #TODO - No updatedAt in category file + Dir["#{task_path}/categories/*.json"].each { |file| + category = JSON.parse(File.read(file)) + + sourceCategories.push(category['name']) + + if destinationCategories.include?(category['name']) + $task_sdk.update_category(category['name'], category) else - space_sdk.add_datastore_form(body) + $task_sdk.add_category(category) end } -# ------------------------------------------------------------------------------ -# delete datastore forms -# Delete any form from the destination which are missing from the import data -# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # delete task categories + # ------------------------------------------------------------------------------ + destinationCategories.each { |category| + if vars["options"]["delete"] && !sourceCategories.include?(category) + $task_sdk.delete_category(category) + end + } -destinationDatastoreForms.each { |datastore_slug| - if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) - space_sdk.delete_datastore_form(datastore_slug) - end -} + # ------------------------------------------------------------------------------ + # import task policy rules + # ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# Import Datastore Data -# ------------------------------------------------------------------------------ 
-Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - (space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| - space_sdk.delete_datastore_submission(submission['id']) - } - File.readlines(filename).each { |line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } - end + destinationPolicyRuleArray = $task_sdk.find_policy_rules().content['policyRules'] + sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| + rule = JSON.parse(File.read(file)) + {"name" => rule['name'], "type" => rule['type']} } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] - } - space_sdk.add_datastore_submission(form_slug, body).content - } -} - -# ------------------------------------------------------------------------------ -# import space teams -# ------------------------------------------------------------------------------ -if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 - SourceTeamArray = [] - destinationTeamsArray = (space_sdk.find_teams().content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name']} } - teams.each{ |team| - body = JSON.parse(File.read(team)) - if !destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug'] }.nil? 
- space_sdk.update_team(body['slug'], body) + + Dir["#{task_path}/policyRules/*.json"].each { |file| + rule = JSON.parse(File.read(file)) + if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? + $task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) else - space_sdk.add_team(body) + $task_sdk.add_policy_rule(rule) end - #Add Attributes to the Team - (body['attributes'] || []).each{ | attribute | - space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) - } - SourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) } # ------------------------------------------------------------------------------ - # delete space teams - # TODO: A method doesn't exist for deleting the team + # delete task policy rules # ------------------------------------------------------------------------------ - - destinationTeamsArray.each { |team| - #if !SourceTeamArray.include?(team) - if SourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? - #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. - #space_sdk.delete_team(team['slug']) + destinationPolicyRuleArray.each { |rule| + if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? + $task_sdk.delete_policy_rule(rule) end } -end - -# ------------------------------------------------------------------------------ -# import kapp data -# ------------------------------------------------------------------------------ - -kapps_array = [] -Dir["#{core_path}/space/kapps/*"].each { |file| - kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? 
File::SEPARATOR : x}.last.gsub('.json','') - next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration - kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur - kapp = {} - kapp['slug'] = kapp_slug # set kapp_slug - - if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file - kapp = JSON.parse( File.read(file) ) - kappExists = space_sdk.find_kapp(kapp['slug']).code.to_i == 200 - if kappExists - space_sdk.update_kapp(kapp['slug'], kapp) - else - space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) - end - end # ------------------------------------------------------------------------------ - # Migrate Kapp Attribute Definitions + # Delete Trees and Routines not in the Source Data # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") - sourceKappAttributeArray = [] - destinationKappAttributeArray = (space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappAttributeDefinitions = JSON.parse(File.read(file)) - (kappAttributeDefinitions || []).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceKappAttributeArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Attribute Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each 
{ | attribute | - if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) - space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) + + # identify Trees and Routines on destination + destinationtrees = [] + trees = $task_sdk.find_trees().content + (trees['trees'] || []).each { |tree| + destinationtrees.push( tree['title'] ) + } + + # identify Routines in source data + begin + sourceTrees = [] + Dir["#{task_path}/routines/*.xml"].each {|routine| + doc = REXML::Document.new(File.new(routine)) + root = doc.root + sourceTrees.push("#{root.elements["taskTree/name"].text}") + } + rescue + $logger.error "Error while identifying routines" + end + + begin + # identify trees in source data + Dir["#{task_path}/sources/*"].each {|source| + if File.directory? source + Dir["#{source}/trees/*.xml"].each { |tree| + doc = REXML::Document.new(File.new(tree)) + root = doc.root + tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" + sourceTrees.push(tree) + } end } + rescue + $logger.error "Error identifying trees" end - # ------------------------------------------------------------------------------ - # Migrate Kapp Category Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") - sourceKappCategoryArray = [] - destinationKappAttributeArray = (space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappCategoryDefinitions = JSON.parse(File.read(file)) - (kappCategoryDefinitions || []).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], 
attribute['description'], attribute['allowsMultiple']) - end - sourceKappCategoryArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Category Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceKappCategoryArray.include?(attribute) - space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) + begin + # Delete the extra tress and routines on the source + destinationtrees.each { | tree | + if vars["options"]["delete"] && !sourceTrees.include?(tree) + treeDef = tree.split(' :: ') + $task_sdk.delete_tree( tree ) end } + rescue + $logger.error "Error deleting extra trees/routines on source" end + + + # Import v6 workflows as these are not not the same as Trees and Routines + $logger.info "Importing workflows" + $space_sdk.import_workflows(vars["core"]["space_slug"]) + + # ------------------------------------------------------------------------------ + # complete + # ------------------------------------------------------------------------------ + + $logger.info "Finished importing the \"#{template_name}\" forms." 
+ + $pool.shutdown + $pool.wait_for_termination +end + + + + + + + ################################################################################ + # Import Methods + ################################################################################ + # ------------------------------------------------------------------------------ - # Migrate Kapp Form Attribute Definitions + # Update Space Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") - sourceFormAttributeArray = [] - destinationFormAttributeArray = (space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} - formAttributeDefinitions = JSON.parse(File.read(file)) - (formAttributeDefinitions || []).each { |attribute| - if destinationFormAttributeArray.include?(attribute['name']) - space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceFormAttributeArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Attribute Definitions - # ------------------------------------------------------------------------------ - destinationFormAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) - space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) + + def update_space_attributes(core_path) + sourceSpaceAttributeArray = [] + destinationSpaceAttributeArray = ($space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") + 
spaceAttributeDefinitions = JSON.parse(File.read(file)) + + spaceAttributeDefinitions.each { |attribute| + if destinationSpaceAttributeArray.include?(attribute['name']) + $space_sdk.update_space_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceSpaceAttributeArray.push(attribute['name']) + } + end + destinationSpaceAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) + $space_sdk.delete_space_attribute_definition(attribute) end } end - + + + + # ------------------------------------------------------------------------------ - # Migrate Kapp Form Type Definitions + # Update User Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") - sourceFormTypesArray = [] - destinationFormTypesArray = (space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} - formTypes = JSON.parse(File.read(file)) - (formTypes || []).each { |body| - if destinationFormTypesArray.include?(body['name']) - space_sdk.update_formtype(kapp['slug'], body['name'], body) - else - space_sdk.add_formtype(kapp['slug'], body) - end - sourceFormTypesArray.push(body['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Type Definitions - # ------------------------------------------------------------------------------ - destinationFormTypesArray.each { | name | - if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) - space_sdk.delete_formtype(kapp['slug'],name) + def update_user_attributes( core_path) + sourceUserAttributeArray = [] + destinationUserAttributeArray = ($space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { 
|definition| definition['name']} + + if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") + userAttributeDefinitions = JSON.parse(File.read(file)) + userAttributeDefinitions.each { |attribute| + if destinationUserAttributeArray.include?(attribute['name']) + $space_sdk.update_user_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserAttributeArray.push(attribute['name']) + } + end + + destinationUserAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) + $space_sdk.delete_user_attribute_definition(attribute) end } end # ------------------------------------------------------------------------------ - # Migrate Kapp Security Policy Definitions + # Update User Profile Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") - sourceSecurtyPolicyArray = [] - destinationSecurtyPolicyArray = (space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} - securityPolicyDefinitions = JSON.parse(File.read(file)) - (securityPolicyDefinitions || []).each { |attribute| - if destinationSecurtyPolicyArray.include?(attribute['name']) - space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_security_policy_definition(kapp['slug'], attribute) - end - sourceSecurtyPolicyArray.push(attribute['name']) - } + def update_user_profile_attributes(core_path) + sourceUserProfileAttributeArray = [] + destinationUserProfileAttributeArray = ($space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = 
"#{core_path}/space/userProfileAttributeDefinitions.json") + userProfileAttributeDefinitions = JSON.parse(File.read(file)) + + userProfileAttributeDefinitions.each { |attribute| + if destinationUserProfileAttributeArray.include?(attribute['name']) + $space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserProfileAttributeArray.push(attribute['name']) + } + end - destinationSecurtyPolicyArray.each { | attribute | - if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) - space_sdk.delete_security_policy_definition(kapp['slug'],attribute) + destinationUserProfileAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) + $space_sdk.delete_user_profile_attribute_definition(attribute) end } end - + + + # ------------------------------------------------------------------------------ - # Migrate Kapp Categories + # Update Team Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") - sourceCategoryArray = [] - destinationCategoryArray = (space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| definition['slug']} - categories = JSON.parse(File.read(file)) - (categories || []).each { |attribute| - if destinationCategoryArray.include?(attribute['slug']) - space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) - else - space_sdk.add_category_on_kapp(kapp['slug'], attribute) + def update_team_attributes( core_path) + sourceTeamAttributeArray = [] + destinationTeamAttributeArray = ($space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} + + if File.file?(file = 
"#{core_path}/space/teamAttributeDefinitions.json") + teamAttributeDefinitions = JSON.parse(File.read(file)) + teamAttributeDefinitions.each { |attribute| + if destinationTeamAttributeArray.include?(attribute['name']) + $space_sdk.update_team_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_team_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceTeamAttributeArray.push(attribute['name']) + } + end + + destinationTeamAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) + $space_sdk.delete_team_attribute_definition(attribute) end - sourceCategoryArray.push(attribute['slug']) } - # ------------------------------------------------------------------------------ - # Delete Kapp Categories - # ------------------------------------------------------------------------------ - - destinationCategoryArray.each { | attribute | - if vars["options"]["delete"] && !sourceCategoryArray.include?(attribute) - space_sdk.delete_category_on_kapp(kapp['slug'],attribute) + end + + + # ------------------------------------------------------------------------------ + # Update Datastore Attributes + # ------------------------------------------------------------------------------ + def update_datastore_attributes( core_path) + sourceDatastoreAttributeArray = [] + destinationDatastoreAttributeArray =($space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") + datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) + datastoreFormAttributeDefinitions.each { |attribute| + if destinationDatastoreAttributeArray.include?(attribute['name']) + $space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) + else + 
$space_sdk.add_datastore_form_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceDatastoreAttributeArray.push(attribute['name']) + } + end + + destinationDatastoreAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) + #Delete form is disabled + #$space_sdk.delete_datastore_form_attribute_definition(attribute) end } end + # ------------------------------------------------------------------------------ - # import space webhooks + # Update Security Policy # ------------------------------------------------------------------------------ - sourceSpaceWebhooksArray = [] - destinationSpaceWebhooksArray = (space_sdk.find_webhooks_on_space().content['webhooks'] || {}).map{ |webhook| webhook['name']} - - Dir["#{core_path}/space/webhooks/*.json"].each{ |file| - webhook = JSON.parse(File.read(file)) - if destinationSpaceWebhooksArray.include?(webhook['name']) - space_sdk.update_webhook_on_space(webhook['name'], webhook) - elsif - space_sdk.add_webhook_on_space(webhook) + def update_security_policy( core_path) + sourceSecurityPolicyArray = [] + destinationSecurityPolicyArray = ($space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") + securityPolicyDefinitions = JSON.parse(File.read(file)) + securityPolicyDefinitions.each { |attribute| + if destinationSecurityPolicyArray.include?(attribute['name']) + $space_sdk.update_space_security_policy_definition(attribute['name'], attribute) + else + $space_sdk.add_space_security_policy_definition(attribute) + end + sourceSecurityPolicyArray.push(attribute['name']) + } end - sourceSpaceWebhooksArray.push(webhook['name']) - } + + destinationSecurityPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurityPolicyArray.include?(attribute) + 
$space_sdk.delete_space_security_policy_definition(attribute) + end + } + end # ------------------------------------------------------------------------------ - # delete space webhooks - # TODO: A method doesn't exist for deleting the webhook + # Delete Space Web APIs + # Delete any Web APIs from the destination which are missing from the import data # ------------------------------------------------------------------------------ + def delete_space_web_apis( core_path) + destinationSpaceWebApisArray.each { | webApi | + if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) + $space_sdk.delete_space_webapi(webApi) + end + } + end - destinationSpaceWebhooksArray.each do |webhook| - if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) - space_sdk.delete_webhook_on_space(webhook) - end - end # ------------------------------------------------------------------------------ - # Migrate Kapp Webhooks + # import datastore forms # ------------------------------------------------------------------------------ - sourceWebhookArray = [] - webhooks_on_kapp = space_sdk.find_webhooks_on_kapp(kapp['slug']) - - if webhooks_on_kapp.code=="200" - destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| - webhookDef = JSON.parse(File.read(webhookFile)) - if destinationWebhookArray.include?(webhookDef['name']) - space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) - else - space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) - end - sourceWebhookArray.push(webhookDef['name']) - } - - # ------------------------------------------------------------------------------ - # Delete Kapp Webhooks - # ------------------------------------------------------------------------------ - destinationWebhookArray.each { | attribute | - if vars["options"]["delete"] && 
!sourceWebhookArray.include?(attribute) - space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) + + def import_datastore_forms( core_path) + $logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" + #TODO - Suffers from 1000 query limit + destinationDatastoreForms = [] #From destination server + sourceDatastoreForms = [] #From import data + destinationDatastoreForms = ($space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} + Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| + body = JSON.parse(File.read(datastore)) + sourceDatastoreForms.push(body['slug']) + if destinationDatastoreForms.include?(body['slug']) + $space_sdk.update_datastore_form(body['slug'], body) + else + $space_sdk.add_datastore_form(body) end } - end + end # ------------------------------------------------------------------------------ - # Add Kapp Forms + # delete datastore forms + # Delete any form from the destination which are missing from the import data # ------------------------------------------------------------------------------ - - if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 - sourceForms = [] #From import data - destinationForms = (space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} - forms.each { |form| - properties = File.read(form) - form = JSON.parse(properties) - sourceForms.push(form['slug']) - if destinationForms.include?(form['slug']) - space_sdk.update_form(kapp['slug'] ,form['slug'], form) - else - space_sdk.add_form(kapp['slug'], form) + def delete_datastore_forms(core_path) + destinationDatastoreForms.each { |datastore_slug| + if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) + $space_sdk.delete_datastore_form(datastore_slug) end } - # ------------------------------------------------------------------------------ - # delete forms - # 
------------------------------------------------------------------------------ - destinationForms.each { |slug| - if vars["options"]["delete"] && !sourceForms.include?(slug) - #Delete form is disabled - #space_sdk.delete_form(kapp['slug'], slug) - end - } end - + + + # ------------------------------------------------------------------------------ - # Import Kapp Form Data + # Import Datastore Data # ------------------------------------------------------------------------------ - Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - - # This code could delete all submissions form the form before importing new data - # It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. - #(space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| - # space_sdk.delete_submission(submission['id']) - #} - - File.readlines(filename).each { |line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } - end + + def import_datastore_data( core_path) + Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + ($space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| + $space_sdk.delete_datastore_submission(submission['id']) } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] + File.readlines(filename).each { |line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + $space_sdk.add_datastore_submission(form_slug, body).content } - space_sdk.add_submission(kapp['slug'], form_slug, body).content } - } - # ------------------------------------------------------------------------------ - # Add Kapp Web APIs - # ------------------------------------------------------------------------------ - sourceWebApisArray = [] - destinationWebApisArray = (space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| - body = JSON.parse(File.read(webApi)) - if destinationWebApisArray.include?(body['slug']) - space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) - else - 
space_sdk.add_kapp_webapi(kapp['slug'], body) - end - sourceWebApisArray.push(body['slug']) - } + end + # ------------------------------------------------------------------------------ - # Delete Kapp Web APIs + # import space teams # ------------------------------------------------------------------------------ - destinationWebApisArray.each { | webApi | - if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) - space_sdk.delete_kapp_webapi(kapp['slug'], webApi) - end - } -} -# ------------------------------------------------------------------------------ -# task -# ------------------------------------------------------------------------------ + def import_space_teams( core_path) + + if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 + sourceTeamArray = [] + destinationTeamsArray = ($space_sdk.find_teams({"include"=>"details"}).content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name'], "updatedAt"=>team['updatedAt']} } + teams.each{ |team| + body = JSON.parse(File.read(team)) + destinationTeam = destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug']} + if !destination_team.nil? 
+ #If no updates, skip + if destination_team['updatedAt'] != team['updatedAt'] + $space_sdk.update_team(body['slug'], body) + else -task_sdk = KineticSdk::Task.new({ - app_server_url: "#{vars["task"]["server_url"]}", - username: vars["task"]["service_user_username"], - password: vars["task"]["service_user_password"], - options: http_options.merge({ export_directory: "#{task_path}" }) -}) + end -# ------------------------------------------------------------------------------ -# task import -# ------------------------------------------------------------------------------ + else + $space_sdk.add_team(body) + end + #Add Attributes to the Team + (body['attributes'] || []).each{ | attribute | + $space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) + } + sourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) + } + destinationTeamsArray.each { |team| + #if !SourceTeamArray.include?(team) + if sourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? + #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. + #$space_sdk.delete_team(team['slug']) + end + } + end + end -logger.info "Importing the task components for the \"#{template_name}\" template." 
-logger.info " importing with api: #{task_sdk.api_url}" + # ------------------------------------------------------------------------------ + # Import Kapp Categories + # ------------------------------------------------------------------------------ + def import_kapp_categories(core_path) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") + sourceCategoryArray = [] + destinationCategoryArray = ($space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| definition['slug']} + categories = JSON.parse(File.read(file)) + (categories || []).each { |attribute| + if destinationCategoryArray.include?(attribute['slug']) + $space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) + else + $space_sdk.add_category_on_kapp(kapp['slug'], attribute) + end + sourceCategoryArray.push(attribute['slug']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Categories + # ------------------------------------------------------------------------------ + + destinationCategoryArray.each { | attribute | + if vars["options"]["delete"] && !sourceCategoryArray.include?(attribute) + $space_sdk.delete_category_on_kapp(kapp['slug'],attribute) + end + } + end + end + ################################################################################ + # Helpers + ################################################################################ + + #Configuration Selection + def config_selection(config_folder_path) + + #Ensure config folder exists + if !File.directory?(config_folder_path) + $logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." 
+ gets + exit + end -# ------------------------------------------------------------------------------ -# task handlers -# ------------------------------------------------------------------------------ + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + $logger.info "Checking #{config_folder_path} for config files" + #Check config folder for yaml/yml files containing the word 'import' + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) + end + rescue error + #No config files found in config folder + $logger.error "Error finding default config file path!" + $logger.error "Error reported: #{error}" + puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." + gets + exit + end + $logger.info "Found config files" -# import handlers forcing overwrite -task_sdk.import_handlers(true) + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + $logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + $logger.info "Option #{sel} - #{configFile}" + break + rescue + $logger.info "Error selecting config file! Exiting..." + puts "Error selecting config file!" + puts "Exiting..." + gets + exit + end + end while true + return configFile + end + + #Check if nil/unencoded and update accordingly +def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if [pwdAttribute]["service_user_password"].nil? 
+ password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password = vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + begin + fileObj = File.open(file, 'w') + puts "Updated pwd in #{pwdAttribute} to #{enc}" + fileObj.write vars.to_yaml + #{ |f| f.write vars.to_yaml } + rescue ArgumentError + $logger.error("There was an error while updating variables file:") + $logger.error(ArgumentError) + ensure + fileObj.close + end +end -# ------------------------------------------------------------------------------ -# Import Task Trees and Routines -# ------------------------------------------------------------------------------ +#Decode password to utilize +def DecodePWD(file, vars, pwdLoc) + pwdAttribute = vars[pwdLoc]["service_user_password"] + return Base64.decode64(pwdAttribute) +end -# import routines and force overwrite -task_sdk.import_routines(true) -# import trees and force overwrite -task_sdk.import_trees(true) +#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions +def ValidatePWD(file, vars) + $pwdFields.each do |field| + t = vars[field]["service_user_password"] + #See if not a string, not encoded, or default + if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" + puts "Updating password #{t}" + SecurePWD(file, vars, field) + end + end +end +def convert_json_to_csv(json_file) + csv_file = json_file.gsub("ndjson","csv") + CSV.open(csv_file, 'w') do |csv| + File.foreach(json_file).with_index do |line, index| + record = JSON.parse(line) + + # Write header on first row + csv << record.keys if index == 0 + + # Write values + csv << record.values + end + end + end + def compare_forms(kapp_slug, old_form) + end -# ------------------------------------------------------------------------------ -# import task categories -# 
------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # Migrate Kapp Form Attribute Definitions + # ------------------------------------------------------------------------------ + def import_kapp_form_attribute_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") + sourceFormAttributeArray = [] + destinationFormAttributeArray = ($space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} + formAttributeDefinitions = JSON.parse(File.read(file)) + (formAttributeDefinitions || []).each { |attribute| + if destinationFormAttributeArray.include?(attribute['name']) + $space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceFormAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Attribute Definitions + # ------------------------------------------------------------------------------ + destinationFormAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) + $space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) + end + } + end + end -sourceCategories = [] #From import data -destinationCategories = (task_sdk.find_categories().content['categories'] || {}).map{ |category| category['name']} + # ------------------------------------------------------------------------------ + # import bridge models + # *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. 
+ # ------------------------------------------------------------------------------ + def import_bridge_models(core_path,vars) + destinationModels = $space_sdk.find_bridge_models() + destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} + + Dir["#{core_path}/space/models/*.json"].each{ |model| + body = JSON.parse(File.read(model)) + if destinationModels_Array.include?(body['name']) + $space_sdk.update_bridge_model(body['name'], body) + elsif + $space_sdk.add_bridge_model(body) + end + } + sourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } -Dir["#{task_path}/categories/*.json"].each { |file| - category = JSON.parse(File.read(file)) - sourceCategories.push(category['name']) - if destinationCategories.include?(category['name']) - task_sdk.update_category(category['name'], category) - else - task_sdk.add_category(category) + destinationModels_Array.each do |model| + if vars["options"]["delete"] && !sourceModelsArray.include?(model) + $space_sdk.delete_bridge_model(model) + end + end end -} -# ------------------------------------------------------------------------------ -# delete task categories -# ------------------------------------------------------------------------------ + def import_kapp_form_data(core_path,kapp) + + promises = [] + Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + + #TODO - Get path to ndjson + #Convert to csv + #Import CSV + convert_json_to_csv(filename) + $space_sdk.import_submissions_csv(kapp['slug'],form_slug,body).content + + #How much of the code below do I need to integrate with above? 
+ + ## This code could delete all submissions from the form before importing new data + ## It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. + #($space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| + # $space_sdk.delete_submission(submission['id']) + #} + + File.readlines(filename).each { |line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + $space_sdk.add_submission(kapp['slug'], form_slug, body).content + } + rescue => e + $mutex.synchronize do + $logger.error("Failed to import form data from : #{e.message}") + $logger.error(e.backtrace.join("\n")) + end + raise + end + end + } + promises.each(&:wait!) 
-destinationCategories.each { |category| - if vars["options"]["delete"] && !sourceCategories.include?(category) - task_sdk.delete_category(category) - end -} + $mutex.synchronize { $logger.info("Finished importing form data for kapp #{kapp['slug']}") } -# ------------------------------------------------------------------------------ -# import task policy rules -# ------------------------------------------------------------------------------ + end -destinationPolicyRuleArray = task_sdk.find_policy_rules().content['policyRules'] -sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| - rule = JSON.parse(File.read(file)) - {"name" => rule['name'], "type" => rule['type']} - } + # ------------------------------------------------------------------------------ + # Import Space Web APIs + # ------------------------------------------------------------------------------ -Dir["#{task_path}/policyRules/*.json"].each { |file| - rule = JSON.parse(File.read(file)) - if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? - task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) - else - task_sdk.add_policy_rule(rule) - end -} - -# ------------------------------------------------------------------------------ -# delete task policy rules -# ------------------------------------------------------------------------------ -destinationPolicyRuleArray.each { |rule| - if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? 
- task_sdk.delete_policy_rule(rule) - end -} - -# ------------------------------------------------------------------------------ -# Delete Trees and Routines not in the Source Data -# ------------------------------------------------------------------------------ - -# identify Trees and Routines on destination -destinationtrees = [] -trees = task_sdk.find_trees().content -(trees['trees'] || []).each { |tree| - destinationtrees.push( tree['title'] ) -} - -# identify Routines in source data -sourceTrees = [] -Dir["#{task_path}/routines/*.xml"].each {|routine| - doc = Document.new(File.new(routine)) - root = doc.root - sourceTrees.push("#{root.elements["taskTree/name"].text}") -} -# identify trees in source data -Dir["#{task_path}/sources/*"].each {|source| - if File.directory? source - Dir["#{source}/trees/*.xml"].each { |tree| - doc = Document.new(File.new(tree)) - root = doc.root - tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" - sourceTrees.push(tree) + def import_space_web_apis(core_path) + sourceSpaceWebApisArray = [] + destinationSpaceWebApisArray = ($space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} + promises = [] + Dir["#{core_path}/space/webApis/*"].each{ |file| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + body = JSON.parse(File.read(file)) + if destinationSpaceWebApisArray.include?(body['slug']) + $space_sdk.update_space_webapi(body['slug'], body) + else + $space_sdk.add_space_webapi(body) + end + sourceSpaceWebApisArray.push(body['slug']) + rescue + end + end } + promises.each(&:wait!) 
+ end + # ------------------------------------------------------------------------------ + # Migrate Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + def import_kapp_attribute_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") + sourceKappAttributeArray = [] + destinationKappAttributeArray = ($space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappAttributeDefinitions = JSON.parse(File.read(file)) + (kappAttributeDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + $space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceKappAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) + $space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) + end + } + end + end + # ------------------------------------------------------------------------------ + # Import Kapp Forms + # ------------------------------------------------------------------------------ + def import_forms(core_path,kapp, vars) + if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 + sourceForms = [] #From import data + #destinationForms = ($space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} + destinationForms = 
($space_sdk.find_forms(kapp['slug'],{'export'=>'true'}).content['forms'] || {}) + $logger.info ("Iterating kapp forms") + promises = [] + + + forms.each do |form| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + properties = File.read(form) + form = JSON.parse(properties) + $mutex.synchronize do + $logger.info "Currently #{form['slug']}" + sourceForms.push(form['slug']) + end + + prev_form = (destinationForms.find { |f| f["slug"] == form['slug'] }) + if !prev_form.nil? + #Compare old and new forms + #$space_sdk.compare_forms(destinationForms["#{form['slug']}"], form ) + #Check last updated date/time and compare + $mutex.synchronize { $logger.info("Comparing previous and current form exports for #{form['slug']}") } + match = (form == prev_form) + #Skip if forms match + if !match + $mutex.synchronize { $logger.info("Updating form #{form['slug']}") } + $space_sdk.update_form(kapp['slug'] ,form['slug'], form) + else + $mutex.synchronize { $logger.info("Form #{form['slug']} updatedAt values match, skipping...") } + end + else + $mutex.synchronize { $logger.info("Adding new form #{form['slug']}") } + $space_sdk.add_form(kapp['slug'], form) + end + rescue => e + $mutex.synchronize do + $logger.error("Failed to import form from #{form_file}: #{e.message}") + $logger.error(e.backtrace.join("\n")) + end + raise + end + end + end + + promises.each(&:wait!) 
+ + $mutex.synchronize { $logger.info("Finished importing #{sourceForms.size} forms for kapp #{kapp['slug']}") } + + # ------------------------------------------------------------------------------ + # delete forms + # ------------------------------------------------------------------------------ + destinationForms.each { |slug| + if vars["options"]["delete"] && !sourceForms.include?(slug) + #Delete form is disabled + #$space_sdk.delete_form(kapp['slug'], slug) + end + } + end + end + # ------------------------------------------------------------------------------ + # Migrate Kapp Category Definitions + # ------------------------------------------------------------------------------ + def import_kapp_category_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") + sourceKappCategoryArray = [] + destinationKappAttributeArray = ($space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappCategoryDefinitions = JSON.parse(File.read(file)) + (kappCategoryDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + $space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceKappCategoryArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Category Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceKappCategoryArray.include?(attribute) + $space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) + end + } + end end -} -# Delete the extra tress and routines 
on the source -destinationtrees.each { | tree | - if vars["options"]["delete"] && !sourceTrees.include?(tree) - treeDef = tree.split(' :: ') - task_sdk.delete_tree( tree ) + def import_kapp_form_type_definitions(core_pathh,kapp) + # ------------------------------------------------------------------------------ + # Migrate Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") + sourceFormTypesArray = [] + destinationFormTypesArray = ($space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} + formTypes = JSON.parse(File.read(file)) + (formTypes || []).each { |body| + if destinationFormTypesArray.include?(body['name']) + $space_sdk.update_formtype(kapp['slug'], body['name'], body) + else + $space_sdk.add_formtype(kapp['slug'], body) + end + sourceFormTypesArray.push(body['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + destinationFormTypesArray.each { | name | + if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) + $space_sdk.delete_formtype(kapp['slug'],name) + end + } + end + end + def import_kapp_web_apis(core_path,kapp) + # ------------------------------------------------------------------------------ + # Add Kapp Web APIs + # ------------------------------------------------------------------------------ + sourceWebApisArray = [] + destinationWebApisArray = ($space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| + body = JSON.parse(File.read(webApi)) + if destinationWebApisArray.include?(body['slug']) + $space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) + 
else + $space_sdk.add_kapp_webapi(kapp['slug'], body) + end + sourceWebApisArray.push(body['slug']) + } end -} -# Import v6 workflows as these are not not the same as Trees and Routines -logger.info "Importing workflows" -space_sdk.import_workflows(vars["core"]["space_slug"]) + def import_kapp_security_policy_definitions(core_path,kapp) + # ------------------------------------------------------------------------------ + # Migrate Kapp Security Policy Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") + sourceSecurtyPolicyArray = [] + destinationSecurtyPolicyArray = ($space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + securityPolicyDefinitions = JSON.parse(File.read(file)) + (securityPolicyDefinitions || []).each { |attribute| + if destinationSecurtyPolicyArray.include?(attribute['name']) + $space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_security_policy_definition(kapp['slug'], attribute) + end + sourceSecurtyPolicyArray.push(attribute['name']) + } -# ------------------------------------------------------------------------------ -# complete -# ------------------------------------------------------------------------------ + destinationSecurtyPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) + $space_sdk.delete_security_policy_definition(kapp['slug'],attribute) + end + } + end + end -logger.info "Finished importing the \"#{template_name}\" forms." +starting = Process.clock_gettime(Process::CLOCK_MONOTONIC) +import_space() +ending = Process.clock_gettime(Process::CLOCK_MONOTONIC) +elapsed = ending - starting +puts "Time: #{elapsed}" \ No newline at end of file