From 92a48d676112b80a8b2a5b3b9f87957dd809878b Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Tue, 25 Jul 2023 09:27:56 -0500 Subject: [PATCH 01/19] Enhancement to store exports into slug-based folders in exports --- export.rb | 20 ++++++++++++++------ import.rb | 7 +++++-- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/export.rb b/export.rb index bf23292..5da977c 100644 --- a/export.rb +++ b/export.rb @@ -83,8 +83,7 @@ # determine the directory paths platform_template_path = File.dirname(File.expand_path(__FILE__)) -core_path = File.join(platform_template_path, "core") -task_path = File.join(platform_template_path, "task") + # ------------------------------------------------------------------------------ # methods @@ -135,8 +134,9 @@ def remove_discussion_id_attribute(model) # core # ------------------------------------------------------------------------------ -logger.info "Removing files and folders from the existing \"#{template_name}\" template." -FileUtils.rm_rf Dir.glob("#{core_path}/*") +#Setting core paths +core_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "core") +task_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "task") logger.info "Setting up the Core SDK" @@ -148,6 +148,11 @@ def remove_discussion_id_attribute(model) options: http_options.merge({ export_directory: "#{core_path}" }) }) + + +logger.info "Removing files and folders from the existing \"#{template_name}\" template." +FileUtils.rm_rf Dir.glob("#{core_path}/*") + # fetch export from core service and write to export directory logger.info "Exporting the core components for the \"#{template_name}\" template." 
logger.info " exporting with api: #{space_sdk.api_url}" @@ -266,8 +271,7 @@ def remove_discussion_id_attribute(model) # ------------------------------------------------------------------------------ # task # ------------------------------------------------------------------------------ -logger.info "Removing files and folders from the existing \"#{template_name}\" template." -FileUtils.rm_rf Dir.glob("#{task_path}/*") + task_sdk = KineticSdk::Task.new({ app_server_url: "#{vars["task"]["server_url"]}", @@ -276,6 +280,10 @@ def remove_discussion_id_attribute(model) options: http_options.merge({ export_directory: "#{task_path}" }) }) + +logger.info "Removing files and folders from the existing \"#{template_name}\" template." +FileUtils.rm_rf Dir.glob("#{task_path}/*") + logger.info "Exporting the task components for the \"#{template_name}\" template." logger.info " exporting with api: #{task_sdk.api_url}" diff --git a/import.rb b/import.rb index bb9933d..d7a7285 100644 --- a/import.rb +++ b/import.rb @@ -82,8 +82,7 @@ # determine the directory paths platform_template_path = File.dirname(File.expand_path(__FILE__)) -core_path = File.join(platform_template_path, "core") -task_path = File.join(platform_template_path, "task") + # ------------------------------------------------------------------------------ # methods @@ -117,6 +116,10 @@ raise "Config file not found: #{file}" end +#Setting core paths +core_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "core") +task_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "task") + # Set http_options based on values provided in the config file. 
http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| result[k.to_sym] = v From 6a9b3827bcf88f7594b400a1e0c05b68d1fb81c8 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Tue, 25 Jul 2023 09:29:23 -0500 Subject: [PATCH 02/19] Enhancement to store exports in slug-based folders inside exports --- StandAlone bundle/export.rb | 373 ++++++++++++++ StandAlone bundle/import.rb | 933 ++++++++++++++++++++++++++++++++++++ 2 files changed, 1306 insertions(+) create mode 100644 StandAlone bundle/export.rb create mode 100644 StandAlone bundle/import.rb diff --git a/StandAlone bundle/export.rb b/StandAlone bundle/export.rb new file mode 100644 index 0000000..00df24d --- /dev/null +++ b/StandAlone bundle/export.rb @@ -0,0 +1,373 @@ +# RUNNING THE SCRIPT: +# ruby export.rb -c "<>" +# ruby export.rb -c "config/foo-web-server.rb" +# +# Example Config File Values (See Readme for additional details) +# +# +=begin yml config file example + + --- + core: + # server_url: https://.kinops.io OR https://.com/kinetic/ + server_url: https://web-server.com + space_slug: + space_name: + service_user_username: + service_user_password: + options: + SUBMISSIONS_TO_EXPORT: + - datastore: true + formSlug: + + REMOVE_DATA_PROPERTIES: + - createdAt + - createdBy + - updatedAt + - updatedBy + - closedAt + - closedBy + - submittedAt + - submittedBy + - id + - authStrategy + - key + - handle + task: + # server_url: https://.kinops.io/app/components/task OR https://.com/kinetic-task + server_url: https://web-server.com + service_user_username: + service_user_password: + http_options: + log_level: info + log_output: stderr + +=end + + +#Export_Command ocra export.rb --no-dep-run --add-all-core --gem-files C:\Ruby32-x64\bin\ruby_builtin_dlls --gem-scripts C:\Ruby32-x64\lib\ruby\gems\3.2.0\gems\kinetic_sdk-5.0.21 --gemfile ../Gemfile +require 'logger' +require 'json' +require 'optparse' +require 'kinetic_sdk' +require 'Find' +require 'rexml' + +template_name = "platform-template" 
+# pwd = File.expand_path(File.dirname(__FILE__)) +# pwd = File.path('C:\Users\travis.wiese\Source\repos\platform-template\StandAlone bundle') +pwd = Dir.pwd +# logger = Logger.new(STDERR) +begin + logger = Logger.new("#{pwd}\\output.log") +rescue + logger = Logger.new('C:\Users\travis.wiese\Source\repos\platform-template\StandAlone bundle\output.log') + #TODO - Ask for path of configs +end +logger.level = Logger::INFO +logger.formatter = proc do |severity, datetime, progname, msg| + date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") + "[#{date_format}] #{severity}: #{msg}\n" +end + +logger.info "Base directory: #{pwd}" +# Determine the Present Working Directory + + +#ARGV << '-h' if ARGV.empty? +# The options specified on the command line will be collected in *options*. +# options = {} +# OptionParser.new do |opts| +# opts.banner = "Usage: example.rb [options]" + +# opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| +# options["CONFIG_FILE"] = config +# end + + # No argument, shows at tail. This will print an options summary. + # Try it and see! +# opts.on_tail("-h", "--help", "Show this message") do +# puts opts +# exit +# end +# end.parse! + + + +# determine the directory paths +# platform_template_path = File.dirname(File.expand_path(__FILE__)) +platform_template_path = Dir.pwd +config_folder_path = File.join(platform_template_path,'config') + +if !File.directory?(config_folder_path) + logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." + gets + exit +end + +# #Determine Config file to use +config_exts = ['.yaml','.yml'] +configArray = [] +logger.info "Checking #{config_folder_path} for config files" +begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) + end +rescue + logger.info "Error finding default config file path!" 
+ puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." + $stdin.gets + exit +end +logger.info "Found config files" + +puts "Select your config file" +configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" +end +logger.info "Sel section" +print "Selection: " +sel = $stdin.gets.chomp.to_i +begin + configFile = configArray[sel-1] + logger.info "Option #{sel} - #{configFile}" +rescue + logger.info "Error selecting config file!" + puts "Error selecting config file!" + puts "Exiting..." + gets + exit +end + + logger.info "Post selection" + + + + +# ------------------------------------------------------------------------------ +# methods +# ------------------------------------------------------------------------------ + +# Removes discussion id attribute from a given model +def remove_discussion_id_attribute(model) + if !model.is_a?(Array) + if model.has_key?("attributes") + scrubbed = model["attributes"].select do |attribute| + attribute["name"] != "Discussion Id" + end + end + model["attributes"] = scrubbed + end + return model +end + +# ------------------------------------------------------------------------------ +# constants +# ------------------------------------------------------------------------------ + + +# ------------------------------------------------------------------------------ +# setup +# ------------------------------------------------------------------------------ + +logger.info "Installing gems for the \"#{template_name}\" template." +Dir.chdir(platform_template_path) { system("bundle", "install") } + +vars = {} + +# Read the config file specified in the command line into the variable "vars" +if File.file?(file = "#{config_folder_path}/#{configFile}") + vars.merge!( YAML.load(File.read(file)) ) +end + +# Set http_options based on values provided in the config file. 
+http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| + result[k.to_sym] = v +end + +# Set variables based on values provided in the config file. +SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] +REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] + +# ------------------------------------------------------------------------------ +# core +# ------------------------------------------------------------------------------ + +#Setting core paths +core_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "core") +task_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "task") + +logger.info "Setting up the Core SDK" + +space_sdk = KineticSdk::Core.new({ + space_server_url: vars["core"]["server_url"], + space_slug: vars["core"]["space_slug"], + username: vars["core"]["service_user_username"], + password: vars["core"]["service_user_password"], + options: http_options.merge({ export_directory: "#{core_path}" }) +}) + + + +logger.info "Removing files and folders from the existing \"#{template_name}\" template." +FileUtils.rm_rf Dir.glob("#{core_path}/*") + +# fetch export from core service and write to export directory +logger.info "Exporting the core components for the \"#{template_name}\" template." 
+logger.info " exporting with api: #{space_sdk.api_url}" +logger.info " - exporting configuration data (Kapps,forms, etc)" +space_sdk.export_space + +# cleanup properties that should not be committed with export +# bridge keys +Dir["#{core_path}/space/bridges/*.json"].each do |filename| + bridge = JSON.parse(File.read(filename)) + if bridge.has_key?("key") + bridge.delete("key") + File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(bridge)) } + end +end + +# cleanup space +filename = "#{core_path}/space.json" +space = JSON.parse(File.read(filename)) +# filestore key +if space.has_key?("filestore") && space["filestore"].has_key?("key") + space["filestore"].delete("key") +end +# platform components +if space.has_key?("platformComponents") + if space["platformComponents"].has_key?("task") + space["platformComponents"].delete("task") + end + (space["platformComponents"]["agents"] || []).each_with_index do |agent,idx| + space["platformComponents"]["agents"][idx]["url"] = "" + end +end +# rewrite the space file +File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(space)) } + +# cleanup discussion ids +Dir["#{core_path}/**/*.json"].each do |filename| + model = remove_discussion_id_attribute(JSON.parse(File.read(filename))) + File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(model)) } +end + +# export submissions +logger.info "Exporting and writing submission data" +(SUBMISSIONS_TO_EXPORT || []).each do |item| + is_datastore = item["datastore"] || false + logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" + # build directory to write files to + submission_path = is_datastore ? + "#{core_path}/space/datastore/forms/#{item['formSlug']}" : + "#{core_path}/space/kapps/#{item['kappSlug']}/forms/#{item['formSlug']}" + + # get attachment fields from form definition + attachment_form = is_datastore ? 
+ space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : + space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) + + # get attachment fields from form definition + attachement_files = attachment_form.status == 200 ? attachment_form.content['form']['fields'].select{ | file | file['dataType'] == "file" }.map { | field | field['name'] } : {} + + # set base url for attachments + attachment_base_url = is_datastore ? + "#{space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : + "#{space_sdk.api_url.gsub("/app/api/v1", "")}" + + # create folder to write submission data to + FileUtils.mkdir_p(submission_path, :mode => 0700) + + # build params to pass to the retrieve_form_submissions method + params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} + + # open the submissions file in write mode + file = File.open("#{submission_path}/submissions.ndjson", 'w'); + + # ensure the file is empty + file.truncate(0) + response = nil + begin + # get submissions from datastore form or form + response = is_datastore ? 
+ space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : + space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content + if response.has_key?("submissions") + # iterate over each submission + (response["submissions"] || []).each do |submission| + # write each attachment to a a dir + submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| + submission_id = submission['id'] + # define the dir to contain the attahment + download_dir = "#{submission_path}/#{submission_id}/#{field}" + # evaluate fields with multiple attachments + value.map.with_index{ | attachment, index | + # create folder to write attachment + FileUtils.mkdir_p(download_dir, :mode => 0700) + # dir and file name to write attachment + download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" + # url to retrieve the attachment + url = URI.escape("#{attachment_base_url}/submissions/#{submission_id}/files/#{field}/#{index}/#{attachment['name']}") + # retrieve and write attachment + space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) + # add the "path" key to indicate the attachment's location + attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" + } + } + # append each submission (removing the submission unwanted attributes) + file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + end + end + params['pageToken'] = response['nextPageToken'] + # get next page of submissions if there are more + end while !response.nil? && !response['nextPageToken'].nil? 
+ # close the submissions file + file.close() +end +logger.info " - submission data export complete" + +# ------------------------------------------------------------------------------ +# task +# ------------------------------------------------------------------------------ + + +task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{task_path}" }) +}) + + +logger.info "Removing files and folders from the existing \"#{template_name}\" template." +FileUtils.rm_rf Dir.glob("#{task_path}/*") + +logger.info "Exporting the task components for the \"#{template_name}\" template." +logger.info " exporting with api: #{task_sdk.api_url}" + +# export all sources, trees, routines, handlers, +# groups, policy rules, categories, and access keys +task_sdk.export_sources() +task_sdk.find_sources().content['sourceRoots'].each do |source| + task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| + task_sdk.export_tree(tree['title']) + end +end +task_sdk.export_routines() +task_sdk.export_handlers() +task_sdk.export_groups() +task_sdk.export_policy_rules() +task_sdk.export_categories() +task_sdk.export_access_keys() + + + +# ------------------------------------------------------------------------------ +# complete +# ------------------------------------------------------------------------------ + +logger.info "Finished exporting the \"#{template_name}\" template." diff --git a/StandAlone bundle/import.rb b/StandAlone bundle/import.rb new file mode 100644 index 0000000..7ab1fb3 --- /dev/null +++ b/StandAlone bundle/import.rb @@ -0,0 +1,933 @@ +# NOTES +# This is a migration tool not an installation tool. There are certain expectations that the destination is configured and working. +# Agent Server(s) must be added ahead of migration. 
/space/settings/platformComponents/agents +# Task Server must be added ahead of migration. /space/settings/platformComponents/task +# Task Sources must be manually maintained +# Bridges must be added ahead of migration. /space/plugins/bridges +# Agent Handlers are not migrated by design. They intentionally must be manually added. +# Teams are not deleted from destination. It could be too dangerous to delete them. + +# TODO + +# RUNNING THE SCRIPT: +# ruby import_script.rb -c "<>" +# ruby import_script -c "config/foo-web-server.rb" +# +# Example Config File Values (See Readme for additional details) +# +=begin yml config file example + --- + core: + # server_url: https://.kinops.io OR https://.com/kinetic/ + server_url: https://web-server.com + space_slug: + space_name: + service_user_username: + service_user_password: + options: + delete: true + task: + # server_url: https://.kinops.io/app/components/task OR https://.com/kinetic-task + server_url: https://web-server.com + service_user_username: + service_user_password: + http_options: + log_level: info + log_output: stderr +=end + +#Export_command ocra import.rb --no-dep-run --add-all-core --gem-files C:\Ruby32-x64\bin\ruby_builtin_dlls --gem-scripts C:\Ruby32-x64\lib\ruby\gems\3.2.0\gems\kinetic_sdk-5.0.21 --gemfile ../Gemfile + +require 'logger' +require 'json' +require 'rexml/document' +require 'optparse' +require 'kinetic_sdk' +include REXML + +template_name = "platform-template" + +logger = Logger.new(STDERR) +logger.level = Logger::INFO +logger.formatter = proc do |severity, datetime, progname, msg| + date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") + "[#{date_format}] #{severity}: #{msg}\n" +end + +######################################### + +# Determine the Present Working Directory +pwd = Dir.pwd + +# ARGV << '-h' if ARGV.empty? + +# # The options specified on the command line will be collected in *options*. 
+# options = {} +# OptionParser.new do |opts| +# opts.banner = "Usage: example.rb [options]" + +# opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| +# options["CONFIG_FILE"] = config +# end + +# # No argument, shows at tail. This will print an options summary. +# # Try it and see! +# opts.on_tail("-h", "--help", "Show this message") do +# puts opts +# exit +# end +# end.parse! + +# #Now raise an exception if we have not found a CONFIG_FILE option +# raise OptionParser::MissingArgument if options["CONFIG_FILE"].nil? + + +# determine the directory paths +platform_template_path = pwd +config_folder_path = File.join(platform_template_path,'config') + +if !File.directory?(config_folder_path) + logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." + gets + exit +end + +# #Determine Config file to use +config_exts = ['.yaml','.yml'] +configArray = [] +logger.info "Checking #{config_folder_path} for config files" +Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) +end + +puts "Select your config file" +configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" +end +print "Selection: " +sel = gets.chomp.to_i +configFile = configArray[sel-1] + + +# ------------------------------------------------------------------------------ +# methods +# ------------------------------------------------------------------------------ + + + +# ------------------------------------------------------------------------------ +# constants +# ------------------------------------------------------------------------------ + + + +# ------------------------------------------------------------------------------ +# setup +# ------------------------------------------------------------------------------ + +logger.info "Installing gems for the \"#{template_name}\" template." 
+Dir.chdir(platform_template_path) { system("bundle", "install") } + + + +# ------------------------------------------------------------------------------ +# core +# ------------------------------------------------------------------------------ + + + +vars = {} +# Read the config file specified in the command line into the variable "vars" +if File.file?(file = "#{config_folder_path}/#{configFile}") + vars.merge!( YAML.load(File.read(file)) ) +elsif + raise "Config file not found: #{file}" +end + +#Setting core paths +core_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "core") +task_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "task") + +# Set http_options based on values provided in the config file. +http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| + result[k.to_sym] = v +end + +# Set option values to default values if not included +vars["options"] = !vars["options"].nil? ? vars["options"] : {} +vars["options"]["delete"] = !vars["options"]["delete"].nil? ? vars["options"]["delete"] : false + +logger.info "Importing using the config: #{JSON.pretty_generate(vars)}" + + + +space_sdk = KineticSdk::Core.new({ + space_server_url: vars["core"]["server_url"], + space_slug: vars["core"]["space_slug"], + username: vars["core"]["service_user_username"], + password: vars["core"]["service_user_password"], + options: http_options.merge({ export_directory: "#{core_path}" }) +}) + +puts "Are you sure you want to perform an import of data to #{vars["core"]["server_url"]}? 
[Y/N]" +STDOUT.flush +case (gets.downcase.chomp) +when 'y' + puts "Continuing Import" + STDOUT.flush +else + abort "Exiting Import" +end + +################################################################### +# ------------------------------------------------------------------------------ +# Update Space Attributes +# ------------------------------------------------------------------------------ + +sourceSpaceAttributeArray = [] +destinationSpaceAttributeArray = (space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") + spaceAttributeDefinitions = JSON.parse(File.read(file)) + + spaceAttributeDefinitions.each { |attribute| + if destinationSpaceAttributeArray.include?(attribute['name']) + space_sdk.update_space_attribute_definition(attribute['name'], attribute) + else + space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceSpaceAttributeArray.push(attribute['name']) + } +end + +destinationSpaceAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) + space_sdk.delete_space_attribute_definition(attribute) + end +} + +# ------------------------------------------------------------------------------ +# Update User Attributes +# ------------------------------------------------------------------------------ +sourceUserAttributeArray = [] +destinationUserAttributeArray = (space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") + userAttributeDefinitions = JSON.parse(File.read(file)) + userAttributeDefinitions.each { |attribute| + if destinationUserAttributeArray.include?(attribute['name']) + 
space_sdk.update_user_attribute_definition(attribute['name'], attribute) + else + space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserAttributeArray.push(attribute['name']) + } +end + +destinationUserAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) + space_sdk.delete_user_attribute_definition(attribute) + end +} +# ------------------------------------------------------------------------------ +# Update User Profile Attributes +# ------------------------------------------------------------------------------ + +sourceUserProfileAttributeArray = [] +destinationUserProfileAttributeArray = (space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/userProfileAttributeDefinitions.json") + userProfileAttributeDefinitions = JSON.parse(File.read(file)) + + userProfileAttributeDefinitions.each { |attribute| + if destinationUserProfileAttributeArray.include?(attribute['name']) + space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) + else + space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserProfileAttributeArray.push(attribute['name']) + } +end + +destinationUserProfileAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) + space_sdk.delete_user_profile_attribute_definition(attribute) + end +} + + +# ------------------------------------------------------------------------------ +# Update Team Attributes +# ------------------------------------------------------------------------------ + +sourceTeamAttributeArray = [] +destinationTeamAttributeArray = 
(space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/teamAttributeDefinitions.json") + teamAttributeDefinitions = JSON.parse(File.read(file)) + teamAttributeDefinitions.each { |attribute| + if destinationTeamAttributeArray.include?(attribute['name']) + space_sdk.update_team_attribute_definition(attribute['name'], attribute) + else + space_sdk.add_team_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceTeamAttributeArray.push(attribute['name']) + } +end + +destinationTeamAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) + space_sdk.delete_team_attribute_definition(attribute) + end +} + + +# ------------------------------------------------------------------------------ +# Update Datastore Attributes +# ------------------------------------------------------------------------------ + +sourceDatastoreAttributeArray = [] +destinationDatastoreAttributeArray =(space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") + datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) + datastoreFormAttributeDefinitions.each { |attribute| + if destinationDatastoreAttributeArray.include?(attribute['name']) + space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) + else + space_sdk.add_datastore_form_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceDatastoreAttributeArray.push(attribute['name']) + } +end + +destinationDatastoreAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) + #Delete form is disabled + 
#space_sdk.delete_datastore_form_attribute_definition(attribute) + end +} + + +# ------------------------------------------------------------------------------ +# Update Security Policy +# ------------------------------------------------------------------------------ + +sourceSecurityPolicyArray = [] +destinationSecurityPolicyArray = (space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + +if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") + securityPolicyDefinitions = JSON.parse(File.read(file)) + securityPolicyDefinitions.each { |attribute| + if destinationSecurityPolicyArray.include?(attribute['name']) + space_sdk.update_space_security_policy_definition(attribute['name'], attribute) + else + space_sdk.add_space_security_policy_definition(attribute) + end + sourceSecurityPolicyArray.push(attribute['name']) + } +end + +destinationSecurityPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurityPolicyArray.include?(attribute) + space_sdk.delete_space_security_policy_definition(attribute) + end +} + + +# ------------------------------------------------------------------------------ +# import bridge models +# *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. 
+# ------------------------------------------------------------------------------ + +destinationModels = space_sdk.find_bridge_models() +destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} + +Dir["#{core_path}/space/models/*.json"].each{ |model| + body = JSON.parse(File.read(model)) + if destinationModels_Array.include?(body['name']) + space_sdk.update_bridge_model(body['name'], body) + elsif + space_sdk.add_bridge_model(body) + end +} + +# ------------------------------------------------------------------------------ +# delete bridge models +# Delete any Bridges from the destination which are missing from the import data +# ------------------------------------------------------------------------------ +SourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } + +destinationModels_Array.each do |model| + if vars["options"]["delete"] && !SourceModelsArray.include?(model) + space_sdk.delete_bridge_model(model) + end +end + +# ------------------------------------------------------------------------------ +# Import Space Web APIs +# ------------------------------------------------------------------------------ + +sourceSpaceWebApisArray = [] +destinationSpaceWebApisArray = (space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} + + +Dir["#{core_path}/space/webApis/*"].each{ |file| + body = JSON.parse(File.read(file)) + if destinationSpaceWebApisArray.include?(body['slug']) + space_sdk.update_space_webapi(body['slug'], body) + else + space_sdk.add_space_webapi(body) + end + sourceSpaceWebApisArray.push(body['slug']) +} + +# ------------------------------------------------------------------------------ +# Delete Space Web APIs +# Delete any Web APIs from the destination which are missing from the import data +# ------------------------------------------------------------------------------ +destinationSpaceWebApisArray.each { | 
webApi | + if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) + space_sdk.delete_space_webapi(webApi) + end +} + +# ------------------------------------------------------------------------------ +# import datastore forms +# ------------------------------------------------------------------------------ +destinationDatastoreForms = [] #From destination server +sourceDatastoreForms = [] #From import data + +logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" + + destinationDatastoreForms = (space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} + Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| + body = JSON.parse(File.read(datastore)) + sourceDatastoreForms.push(body['slug']) + if destinationDatastoreForms.include?(body['slug']) + space_sdk.update_datastore_form(body['slug'], body) + else + space_sdk.add_datastore_form(body) + end + } + +# ------------------------------------------------------------------------------ +# delete datastore forms +# Delete any form from the destination which are missing from the import data +# ------------------------------------------------------------------------------ + + +destinationDatastoreForms.each { |datastore_slug| + if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) + space_sdk.delete_datastore_form(datastore_slug) + end +} + +# ------------------------------------------------------------------------------ +# Import Datastore Data +# ------------------------------------------------------------------------------ +Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + (space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| + space_sdk.delete_datastore_submission(submission['id']) + } + File.readlines(filename).each { 
|line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + space_sdk.add_datastore_submission(form_slug, body).content + } +} + +# ------------------------------------------------------------------------------ +# import space teams +# ------------------------------------------------------------------------------ +if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 + SourceTeamArray = [] + destinationTeamsArray = (space_sdk.find_teams().content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name']} } + teams.each{ |team| + body = JSON.parse(File.read(team)) + if !destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug'] }.nil? + space_sdk.update_team(body['slug'], body) + else + space_sdk.add_team(body) + end + #Add Attributes to the Team + (body['attributes'] || []).each{ | attribute | + space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) + } + SourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) + } + + # ------------------------------------------------------------------------------ + # delete space teams + # TODO: A method doesn't exist for deleting the team + # ------------------------------------------------------------------------------ + + destinationTeamsArray.each { |team| + #if !SourceTeamArray.include?(team) + if SourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? + #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. 
+ #space_sdk.delete_team(team['slug']) + end + } +end + +# ------------------------------------------------------------------------------ +# import kapp data +# ------------------------------------------------------------------------------ + +kapps_array = [] +Dir["#{core_path}/space/kapps/*"].each { |file| + kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? File::SEPARATOR : x}.last.gsub('.json','') + next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration + kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur + kapp = {} + kapp['slug'] = kapp_slug # set kapp_slug + + if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file + kapp = JSON.parse( File.read(file) ) + kappExists = space_sdk.find_kapp(kapp['slug']).code.to_i == 200 + if kappExists + space_sdk.update_kapp(kapp['slug'], kapp) + else + space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) + end + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") + sourceKappAttributeArray = [] + destinationKappAttributeArray = (space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappAttributeDefinitions = JSON.parse(File.read(file)) + (kappAttributeDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], 
attribute['allowsMultiple']) + end + sourceKappAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) + space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) + end + } + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Category Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") + sourceKappCategoryArray = [] + destinationKappAttributeArray = (space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappCategoryDefinitions = JSON.parse(File.read(file)) + (kappCategoryDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceKappCategoryArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Category Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if !sourceKappCategoryArray.include?(attribute) + space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) + end + } + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Form 
Attribute Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") + sourceFormAttributeArray = [] + destinationFormAttributeArray = (space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} + formAttributeDefinitions = JSON.parse(File.read(file)) + (formAttributeDefinitions || []).each { |attribute| + if destinationFormAttributeArray.include?(attribute['name']) + space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceFormAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Attribute Definitions + # ------------------------------------------------------------------------------ + destinationFormAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) + space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) + end + } + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") + sourceFormTypesArray = [] + destinationFormTypesArray = (space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} + formTypes = JSON.parse(File.read(file)) + (formTypes || []).each { |body| + if destinationFormTypesArray.include?(body['name']) + space_sdk.update_formtype(kapp['slug'], body['name'], body) + else + space_sdk.add_formtype(kapp['slug'], body) + end + 
sourceFormTypesArray.push(body['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + destinationFormTypesArray.each { | name | + if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) + space_sdk.delete_formtype(kapp['slug'],name) + end + } + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Security Policy Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") + sourceSecurtyPolicyArray = [] + destinationSecurtyPolicyArray = (space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + securityPolicyDefinitions = JSON.parse(File.read(file)) + (securityPolicyDefinitions || []).each { |attribute| + if destinationSecurtyPolicyArray.include?(attribute['name']) + space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) + else + space_sdk.add_security_policy_definition(kapp['slug'], attribute) + end + sourceSecurtyPolicyArray.push(attribute['name']) + } + + destinationSecurtyPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) + space_sdk.delete_security_policy_definition(kapp['slug'],attribute) + end + } + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Categories + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") + sourceCategoryArray = [] + destinationCategoryArray = (space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| 
definition['slug']} + categories = JSON.parse(File.read(file)) + (categories || []).each { |attribute| + if destinationCategoryArray.include?(attribute['slug']) + space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) + else + space_sdk.add_category_on_kapp(kapp['slug'], attribute) + end + sourceCategoryArray.push(attribute['slug']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Categories + # ------------------------------------------------------------------------------ + + destinationCategoryArray.each { | attribute | + if !sourceCategoryArray.include?(attribute) + space_sdk.delete_category_on_kapp(kapp['slug'],attribute) + end + } + end + + # ------------------------------------------------------------------------------ + # import space webhooks + # ------------------------------------------------------------------------------ + sourceSpaceWebhooksArray = [] + destinationSpaceWebhooksArray = (space_sdk.find_webhooks_on_space().content['webhooks'] || {}).map{ |webhook| webhook['name']} + + Dir["#{core_path}/space/webhooks/*.json"].each{ |file| + webhook = JSON.parse(File.read(file)) + if destinationSpaceWebhooksArray.include?(webhook['name']) + space_sdk.update_webhook_on_space(webhook['name'], webhook) + elsif + space_sdk.add_webhook_on_space(webhook) + end + sourceSpaceWebhooksArray.push(webhook['name']) + } + + # ------------------------------------------------------------------------------ + # delete space webhooks + # TODO: A method doesn't exist for deleting the webhook + # ------------------------------------------------------------------------------ + + destinationSpaceWebhooksArray.each do |webhook| + if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) + space_sdk.delete_webhook_on_space(webhook) + end + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Webhooks + # 
------------------------------------------------------------------------------ + sourceWebhookArray = [] + webhooks_on_kapp = space_sdk.find_webhooks_on_kapp(kapp['slug']) + + if webhooks_on_kapp.code=="200" + destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| + webhookDef = JSON.parse(File.read(webhookFile)) + if destinationWebhookArray.include?(webhookDef['name']) + space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) + else + space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) + end + sourceWebhookArray.push(webhookDef['name']) + } + + # ------------------------------------------------------------------------------ + # Delete Kapp Webhooks + # ------------------------------------------------------------------------------ + destinationWebhookArray.each { | attribute | + if vars["options"]["delete"] && !sourceWebhookArray.include?(attribute) + space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) + end + } + end + + + # ------------------------------------------------------------------------------ + # Add Kapp Forms + # ------------------------------------------------------------------------------ + + if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 + sourceForms = [] #From import data + destinationForms = (space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} + forms.each { |form| + properties = File.read(form) + form = JSON.parse(properties) + sourceForms.push(form['slug']) + if destinationForms.include?(form['slug']) + space_sdk.update_form(kapp['slug'] ,form['slug'], form) + else + space_sdk.add_form(kapp['slug'], form) + end + } + # ------------------------------------------------------------------------------ + # delete forms + # ------------------------------------------------------------------------------ + 
destinationForms.each { |slug| + if vars["options"]["delete"] && !sourceForms.include?(slug) + #Delete form is disabled + #space_sdk.delete_form(kapp['slug'], slug) + end + } + end + + # ------------------------------------------------------------------------------ + # Import Kapp Form Data + # ------------------------------------------------------------------------------ + Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + + # This code could delete all submissions form the form before importing new data + # It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. + #(space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| + # space_sdk.delete_submission(submission['id']) + #} + + File.readlines(filename).each { |line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + space_sdk.add_submission(kapp['slug'], form_slug, body).content + } + } + # ------------------------------------------------------------------------------ + # Add Kapp Web APIs + # ------------------------------------------------------------------------------ + sourceWebApisArray = [] + destinationWebApisArray = (space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| + body = JSON.parse(File.read(webApi)) + if destinationWebApisArray.include?(body['slug']) + space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) + else + space_sdk.add_kapp_webapi(kapp['slug'], body) + end + sourceWebApisArray.push(body['slug']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Web APIs + # ------------------------------------------------------------------------------ + destinationWebApisArray.each { | webApi | + if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) + space_sdk.delete_kapp_webapi(kapp['slug'], webApi) + end + } +} + +# ------------------------------------------------------------------------------ +# task +# ------------------------------------------------------------------------------ + +task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{task_path}" }) +}) + +# ------------------------------------------------------------------------------ +# task import +# 
------------------------------------------------------------------------------ + +logger.info "Importing the task components for the \"#{template_name}\" template." +logger.info " importing with api: #{task_sdk.api_url}" + +# ------------------------------------------------------------------------------ +# task handlers +# ------------------------------------------------------------------------------ + +# import handlers forcing overwrite +task_sdk.import_handlers(true) + +# ------------------------------------------------------------------------------ +# Import Task Trees and Routines +# ------------------------------------------------------------------------------ + +# import routines and force overwrite +task_sdk.import_routines(true) +# import trees and force overwrite +task_sdk.import_trees(true) + + + +# ------------------------------------------------------------------------------ +# import task categories +# ------------------------------------------------------------------------------ + +sourceCategories = [] #From import data +destinationCategories = (task_sdk.find_categories().content['categories'] || {}).map{ |category| category['name']} + +Dir["#{task_path}/categories/*.json"].each { |file| + category = JSON.parse(File.read(file)) + sourceCategories.push(category['name']) + if destinationCategories.include?(category['name']) + task_sdk.update_category(category['name'], category) + else + task_sdk.add_category(category) + end +} + +# ------------------------------------------------------------------------------ +# delete task categories +# ------------------------------------------------------------------------------ + +destinationCategories.each { |category| + if vars["options"]["delete"] && !sourceCategories.include?(category) + task_sdk.delete_category(category) + end +} + +# ------------------------------------------------------------------------------ +# import task policy rules +# 
------------------------------------------------------------------------------ + +destinationPolicyRuleArray = task_sdk.find_policy_rules().content['policyRules'] +sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| + rule = JSON.parse(File.read(file)) + {"name" => rule['name'], "type" => rule['type']} + } + +Dir["#{task_path}/policyRules/*.json"].each { |file| + rule = JSON.parse(File.read(file)) + if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? + task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) + else + task_sdk.add_policy_rule(rule) + end +} + +# ------------------------------------------------------------------------------ +# delete task policy rules +# ------------------------------------------------------------------------------ +destinationPolicyRuleArray.each { |rule| + if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? + task_sdk.delete_policy_rule(rule) + end +} + +# ------------------------------------------------------------------------------ +# Delete Trees and Routines not in the Source Data +# ------------------------------------------------------------------------------ + +# identify Trees and Routines on destination +destinationtrees = [] +trees = task_sdk.find_trees().content +(trees['trees'] || []).each { |tree| + destinationtrees.push( tree['title'] ) +} + +# identify Routines in source data +sourceTrees = [] +Dir["#{task_path}/routines/*.xml"].each {|routine| + doc = Document.new(File.new(routine)) + root = doc.root + sourceTrees.push("#{root.elements["taskTree/name"].text}") +} +# identify trees in source data +Dir["#{task_path}/sources/*"].each {|source| + if File.directory? 
source + Dir["#{source}/trees/*.xml"].each { |tree| + doc = Document.new(File.new(tree)) + root = doc.root + tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" + sourceTrees.push(tree) + } + end +} + +# Delete the extra tress and routines on the source +destinationtrees.each { | tree | + if vars["options"]["delete"] && !sourceTrees.include?(tree) + treeDef = tree.split(' :: ') + task_sdk.delete_tree( tree ) + end +} + +# ------------------------------------------------------------------------------ +# complete +# ------------------------------------------------------------------------------ + +logger.info "Finished importing the \"#{template_name}\" forms." From 32ae1409f52258490963db6227026964492eb0df Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Tue, 8 Aug 2023 13:57:33 -0500 Subject: [PATCH 03/19] Initial commit - Working standalone exe's, selection added --- .gitignore | 8 ++++ Gemfile | 4 +- Gemfile.lock | 16 +++---- StandAlone bundle/Compare_and_sync_spaces.ps1 | 36 ++++++++++++++ StandAlone bundle/Run Compare and Sync.cmd | 1 + StandAlone bundle/fiber.so | Bin 0 -> 12800 bytes StandAlone bundle/import.rb | 1 + config/sync.psd1 | 10 ++++ config/sync_spaces.ps1 | 45 ++++++++++++++++++ 9 files changed, 112 insertions(+), 9 deletions(-) create mode 100644 StandAlone bundle/Compare_and_sync_spaces.ps1 create mode 100644 StandAlone bundle/Run Compare and Sync.cmd create mode 100644 StandAlone bundle/fiber.so create mode 100644 config/sync.psd1 create mode 100644 config/sync_spaces.ps1 diff --git a/.gitignore b/.gitignore index 31286c8..c55caf7 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,11 @@ config/*.yml !config/servername_environment_import_config.yml !config/servername_environment_export_config.yml !config/servername_environment_export_specific_config.yml +exports/* +StandAlone bundle/exports/* +StandAlone bundle/config/*.yml +!StandAlone 
bundle/config/servername_environment_import_config.yml +!StandAlone bundle/config/servername_environment_export_config.yml +!StandAlone bundle/config/servername_environment_export_specific_config.yml +*.exe +*.log \ No newline at end of file diff --git a/Gemfile b/Gemfile index c024cac..e2f1483 100644 --- a/Gemfile +++ b/Gemfile @@ -1,3 +1,5 @@ source 'https://rubygems.org' -gem 'kinetic_sdk', '5.0.19' +gem 'kinetic_sdk', '5.0.21' +gem 'rexml', '~> 3.2', '>= 3.2.5' +gem 'find', '~> 0.1.1' \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 564e43b..21a61de 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,18 +1,16 @@ GEM remote: https://rubygems.org/ specs: - kinetic_sdk (5.0.19) + find (0.1.1) + kinetic_sdk (5.0.21) mime-types (>= 3.3.1) multipart-post (= 2.0.0) - parallel (= 1.12.1) - ruby-progressbar (= 1.9.0) slugify (= 1.0.7) - mime-types (3.3.1) + mime-types (3.4.1) mime-types-data (~> 3.2015) - mime-types-data (3.2021.0704) + mime-types-data (3.2023.0218.1) multipart-post (2.0.0) - parallel (1.12.1) - ruby-progressbar (1.9.0) + rexml (3.2.5) slugify (1.0.7) PLATFORMS @@ -21,7 +19,9 @@ PLATFORMS x64-mingw32 DEPENDENCIES - kinetic_sdk (= 5.0.19) + find (~> 0.1.1) + kinetic_sdk (= 5.0.21) + rexml (~> 3.2, >= 3.2.5) BUNDLED WITH 2.2.5 diff --git a/StandAlone bundle/Compare_and_sync_spaces.ps1 b/StandAlone bundle/Compare_and_sync_spaces.ps1 new file mode 100644 index 0000000..401c6f0 --- /dev/null +++ b/StandAlone bundle/Compare_and_sync_spaces.ps1 @@ -0,0 +1,36 @@ +#Comparison script to see delta in OldSpace and NewSpace for syncing changes +#Written by Travis Wiese - 6/27/2023 + + +Write-Output "Select Option +1) Export Space +2) Import Space +3) Exit" +do{ + + $selection = read-host "Selection" + switch($selection) + { + 1 { + $ExportConfigList = Get-childitem -Path "Config/*" -Filter "*export*" -include *.yml,*.yaml + for($x=1;$x -ne $ExportConfigList.Length;$x++){Write-Output "$x) $($ExportConfigList[$x].Name)"} + $Opt = Read-host 
"Select config" + ruby './export.rb' -c "Config/$($ExportConfigList[$opt].Name)" + } + 2 { + $ImportConfigList = Get-childitem -Path "Config/*" -Filter "*import*" -include *.yml,*.yaml + for($x=1;$x -ne $ImportConfigList.Length;$x++){Write-Output "$x) $($ImportConfigList[$x].Name)"} + $Opt = Read-host "Select config" + # $config_file_path = read-host "Provide relative filepath for configuration(ex: config/export_config_Bluestone.yml)" + ruby './import.rb' -c "Config/$($ImportConfigList[$opt].Name)" + } + 3 {Exit} + default { + Write-Output $("Select Option + 1) Export Space + 2) Import Space + 3) Exit" -replace "(?m)^\s+") + } + } +}while($true) + diff --git a/StandAlone bundle/Run Compare and Sync.cmd b/StandAlone bundle/Run Compare and Sync.cmd new file mode 100644 index 0000000..2775bdb --- /dev/null +++ b/StandAlone bundle/Run Compare and Sync.cmd @@ -0,0 +1 @@ +Powershell.exe -ExecutionPolicy Bypass -command ".\Compare_and_sync_spaces.ps1" \ No newline at end of file diff --git a/StandAlone bundle/fiber.so b/StandAlone bundle/fiber.so new file mode 100644 index 0000000000000000000000000000000000000000..08ab26a531b90dec0047c6b88eeaf8a1ae2b24b9 GIT binary patch literal 12800 zcmeHN4Rl+@l^$7=tt56Nh)oSNo0s5#(^Q8JUNoHh>nNTtr#&!eJ<5J(B{OU#X%sG2! 
zvL~j!ICr|%#z zAJHF2AwG(+oJZx01UtG~^|?-=k@y@rOhBQxHlOnOtMNq1ADb9U@3%2#o#SHemF~us zH?C^nv3DxH(hK#{v++aGZF1Q=V5&;R58bvt_FDs&XFpNJSY6feXK0$GvR|N;%Zq;5 z%h*`4b&iLzJqi|4<`thbI?k;>y@E>zcsvzNb9vd_=qVS)P83-C-bLpco# z#bf&`*RF56*}cwv^FHew7CULM#vemNkjtj|Gyzb48ANb7Y^lmX>6%_BEw|3;LmSA> z1J*vQ{j338Iza|pLfVfJ|B~sXzc6;fpy`(j<0)(3!|0(C_X3S)tbN}iX|h>{PA)U_ z7fb3TWiQ2nPx^IcBxZhXJ!6KCvl`<+o!qYa1^$-MO=-h`hl(JJ@jEY3nbU?$CX-Yh znqCQ1kLZs}#8v=4K?UN(>443DJ5C5qKdt%s`)`J}W` z4H>9-q@z?T&8@km1Io|-E0clGjdfKYdZh88LMY2%a)q_)X=>f;_i5u(p2C85Cp;CI z30at$p0Lub_7_tVWaXzN;FJ!)l8x@zDMK`@JciMueJ?qIjQGbaNAw<$H8W+4M=irqjP1g8jK*tV{DsB4H%3_T@Euf{n-JT6U_}{$M z4xhs!xcG;E%4`Uf9X-bVbq|E%t-x47+zeR<# zPNB2z%M(#LPmQz82&fkmHQ~2#*=6BUnLt%q2eim^|7$B9QPaJMBos&ku@f2589yir z(r_$o;E68}@&&^@Ar77(`!ch5Y|O%)???Xt({F>-+KNqIpbh|ggf_39gv%feTpe3F zh?z-6$H#u6(Pi_aYt?8f#OlU688SbBI!$2u>yVJ7@x)FV#V^GViL)ja5gDtkZ$(Bz zo1`bR_M0EVW*@655DSOYtvYtHK%9lmL%eo^c`afN{+@l4OkP=X0|Doo;=gO_QZ~{O zFM!`-`!K#Yqrc1RIH7+=u6`0ZIr-$Ds#N|Ax*KULt-)5x5#+1)Q2cPIv0}AP@>B$T zlCNR|2Ue;4TQQ=0_xWg6!x_#U_zFetfb!mSnwfPKm1<|@Q843JE&F-v?D!h$|3dtT_1ovb!H9<*P^u2{SgL%&pIo;2oJV^~eF%{{6rW+x>wMYz ztvg{zY`_5CO6Tj**RkU%bFp1%`?$65Z5oDbz7PTtArJC_V@7T`%%!lPMZQvj^M-tF zF;6sB2=3k3iWoNxpvMO{kq}>bd8X{WZ9r-By&bKxr^1u)r(#E+5iJS-so2qvqNOqO z0Jvepen0IR2pAVb-r%yYLf~>^MU8tmHV?Mjg_p-#m*U3R-&4h3WARF_bE(0WDeHq0 zg!~hkvNfbGQF3en64s#7hLx$#*AaCI!?HE#DzCDD{S-z5m&`u`3E7n|ILu=wif(%V z?-|zbEP`a&2&cyHCv3Ti(7On2u&Al@Ux6dSi+#>x*4TQiIG?;|H*|P^4*j?RV4tfZ zzgTyj!=?AU(ujBaFp>KXd98eeFBtS1UQ_-KJE!NaD|I>Dt+G+>Hd%KyAbJ#^;XizG zP3)v#?NcDpdgoreV<#=*Phux+)_4{4Tc6C44dLx^O*qWb604!6YD4{b&nf`G_wOJf*K+^FM$ALFkcL9VKfn`)yq2mU<-XS7Sp6qkWZE zEDM9_n}^cc^ke^#XCcRJgrlT^8P`~>@fS!s3`y?-qMkoo*7!Fu1TK@&x_u9vqPVy+ zeG*LizJ}J9eA9j(Kgg}#mnJzoz=?sRM^texruzLY49_Poap4>b$9d$hLL+?lp~q-P zPo+lfW2eE7rj2ku;_se|5HXK^q~e_Lhkzq5xEuAu8hA!lVFt@K_K>l&A3}x4kBHSc za`mXBC947C8JX9-(*De=SQInsVj}YeSFd8xR$z-r7;jy^d$*FG5$< z&zd$tHeUk{>6ct0LDRvC#NLGcb#FO*wFi^T6fecMw2!vcr4klKiGzenVN0g$z-C~w 
zIkOTVQBtnbXTL`^X*m|{Gib)aM;96!&W!yqyZ_Q|3hgg~S9%_*tvVkzhh8WpaRC5%BAb1s>d@FsJ^#X>^Ra!(2IpZeyKT`ij%0HomuhWF0w)}ZxI-0DGie6I zYxxH&&x^*fS9E=<^!Bh!IdeyggBOhBj@k|6#0F- zvwls~%3H5r$_+1VchU`Bv}FBH?OV%d6j;BT0?EaAt9vPS5uPZuSf8vd#qL#5Zhi8G zLiox-H;F{)eZGWmq%!MYnHS@TNeft*J6m?mj^q2Y)Z8n(T!nt2`#}t@g z>h&hX{grfcVO|8i*ZF~Xk$X4xVYu~Il={{~6XpOX-BB$nk&U!db zOK~XJetG^pw{PD@9vz=LsTb*%Bk|LuvJYJs3D{$>0-xn{oWxvm=SWmhO~$xjuk&N< zozREFqZ&t=KA+W-BFf~M3;q8%@AUCb^q(BBW-nhbFxBKYa-ID#el8`Wz0QH?F?cmA z4}2Bw{~U1-D07H3v3`8=?2u~DK{)UVih%mo@?sDhw9*Q2V zt2#((z~?g@9-9uR`Mwlpn;RgIXRpN%-u@d8)?B>-N4#MiU5^~gKfhk@=ga5= z8Bh2s$|L9U?P;|Zo5uJe6;oS#-iBW}h^ueP@19h*vK-pfdS?EbuWR?2t95v}4(I9c zY#kQp@b_Vj|Bwy`RX7pFnw0%#WLDwc} zkWS14CyjjIsEJFjD8(caRLtoLZul*iaBd!_qw3frYJS~IiuVYleHu)qcCen$6G=)V zNRLq1U{gN`sACzEG*h2O4UnUucXEg3%{m$mR7GKD0u2(hAi{+L(rr zwxRu2yy$#TLH9kc^NpTaT)QH@OIIY7wE1_mH?BeE$|h@Ui}~pyspd!Q)$}P2DJoIw zRXt&V3>+Eu;T~JtrOoHrIxNuP(N2y2oDTQu@KGIpUx#<=aGMS{>Trz;(+`g4r7)U* zdPe8z-;rsscr+?~0smq>2&%ltcEKn6h3>Dp&*)s8Uxy7mmsOQqf^dpcM+7uMpn?oJZNbquev%uqSYl{TC zyM_5-VMBAb(Agz~HZ|W8T+|(G6+>N}Ob}LuB4V_;=v3up?N}zu6DAYKhh;aR<>tPNZcT_;zuaKv1LB4kb$M zX3|YRWQkp^T^+*aV5FM}^V{s2)t}t2B_s;np>4rj{;XXNN50?2r>qa#iv0Geb>_-% z#}5^+KUG8tTCB ze_Mt6hMO9y$(b0#5Ro$0B50m06YNLj&cHc7;rPnDc~c3e?Vc)D0A^Y6ciKiQNZi*QK1xg0PuDcf#`t0 zL8%1(zkLkoXecgX#kCekQJ^qv_M1`#BSb4?#nmb;Wg00MRr!cFx0npfR0~?6krmb! 
zI1B;3{ePkt8Cg+_(NT~BpT)#1_n92VKtak-HhoE%(biJzm=TyBo|Y;aF^?L@4SvHk zGn-axc9;Uju-d1{#EPoXKVS$O%mvI`ZE#G$tX(9xT9=!peHyfwnWa|k6E^sD+p;pU zdLZMk^*Q#@TLBtdb+0~uz-gWtH6ovRK9?Tw zCnwGJ-H(0cUhKCyv_8;kKzlrgHVj%XXdm~C>+};-+RN|LxE(s=I$W(oQHS^F@cTOa zu@1Akp4RKn>F}@)kL&Oq9e$|8Kk3i{o9VIX@H`z>=+Lgi8Xaof*t`GA#&d)57V>&LCsw8$7{|pcq^p35lWB=8gvaKK?aBU8fj~ zOeM3;1y=<{Uvsxu7m0L5@Efa-% z?^d?h*c9zlu{oyZmad4%=9(}x^?!a|Q+p(caj!D9_h87t9x_FOTH~{(ZZXmk#P`Hu z!a7?wh1ol%&F$exs8ei5q|n+x*>dUPMVq=ew?@Q8D^sM%fN$;295`@xWiM2L#;zVBLwG zcsbN78ngAg`gy;s!WT1}?pj_nza_0Gq_P~DwwGkSC literal 0 HcmV?d00001 diff --git a/StandAlone bundle/import.rb b/StandAlone bundle/import.rb index 7ab1fb3..03df699 100644 --- a/StandAlone bundle/import.rb +++ b/StandAlone bundle/import.rb @@ -44,6 +44,7 @@ require 'optparse' require 'kinetic_sdk' include REXML +require 'find' template_name = "platform-template" diff --git a/config/sync.psd1 b/config/sync.psd1 new file mode 100644 index 0000000..e38b590 --- /dev/null +++ b/config/sync.psd1 @@ -0,0 +1,10 @@ +@{ + Core = @{ + OLDSPACE_SLUG = "bluestone-dev" + NEWSPACE_SLUG = "dmp-kd-test" + } + http_options = @{ + log_level = 'info' + log_output = 'stderr' + } +} diff --git a/config/sync_spaces.ps1 b/config/sync_spaces.ps1 new file mode 100644 index 0000000..57caa3e --- /dev/null +++ b/config/sync_spaces.ps1 @@ -0,0 +1,45 @@ + + + +$Config = Import-LocalizedData -BaseDirectory 'C:\Users\travis.wiese\Source\repos\platform-template\config\' -FileName 'sync.psd1' + +$CURRENT_DIR = (&{If([string]::isnullorempty($PSScriptRoot)) {$pwd.path} else {$PSScriptRoot}}) + + +$OLDSPACE_SLUG = $config['core'].OLDSPACE_SLUG +$NEWSPACE_SLUG = $config['core'].NEWSPACE_SLUG + +$OLDSPACE_PATH = $CURRENT_DIR + '\exports\' + $OLDSPACE_SLUG +$NEWSPACE_PATH = $CURRENT_DIR + '\exports\' + $NEWSPACE_SLUG + +$OLDSPACE_Items = Get-ChildItem $OLDSPACE_PATH -Recurse +$NEWSPACE_Items = Get-ChildItem $NEWSPACE_PATH -Recurse + +$Missing_Files = compare-object -ReferenceObject $OLDSPACE_Items -DifferenceObject 
$NEWSPACE_Items -ExcludeDifferent -IncludeEqual + + +$NewArr = @() +Foreach($obj in $OLDSPACE_Items) { + $OLDRelPath = $obj.Directoryname.replace("$CURRENT_DIR\exports\$oldspace_slug\",'') + $New_Items = $NEWSPACE_Items | where-object {$_.Directoryname -like "*$OLDRelPath" -and $_.name -eq $obj.name} + try{ + $FolderDifference = Compare-object -ReferenceObject $obj -DifferenceObject $New_Items -Property name + }catch{ + continue + } + + if([string]::isnullorempty($FolderDifference)){ + #Folders match + continue + } + + <# This tracks all unequal folder items #> + $newarr += [pscustomobject] @{ + dir = $OLDRelPath + OldFiles = $obj + NewFiles = $New_Items + } + + + +} \ No newline at end of file From 2ea7701ee113a9f19cba7875202d54edc9f8c20f Mon Sep 17 00:00:00 2001 From: briapete Date: Mon, 21 Aug 2023 10:02:42 -0500 Subject: [PATCH 04/19] Update import.rb The Kapp Category Attribute Definitions and Categories import was always deleting the values that existed on the destination server but not in the import data. 
Added the condition vars["options"]["delete"] && to the qualification of the delete --- import.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/import.rb b/import.rb index d7a7285..aef1182 100644 --- a/import.rb +++ b/import.rb @@ -522,7 +522,7 @@ # Delete Kapp Category Definitions # ------------------------------------------------------------------------------ destinationKappAttributeArray.each { | attribute | - if !sourceKappCategoryArray.include?(attribute) + if vars["options"]["delete"] && !sourceKappCategoryArray.include?(attribute) space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) end } @@ -621,7 +621,7 @@ # ------------------------------------------------------------------------------ destinationCategoryArray.each { | attribute | - if !sourceCategoryArray.include?(attribute) + if vars["options"]["delete"] && !sourceCategoryArray.include?(attribute) space_sdk.delete_category_on_kapp(kapp['slug'],attribute) end } From 1ab48a861c7fcbce4148f8fd3ac55f5f82652761 Mon Sep 17 00:00:00 2001 From: Brian Peterson Date: Fri, 25 Aug 2023 07:42:32 -0500 Subject: [PATCH 05/19] -Update for v6 Workflow Added the import and export of v6 workflows. 
-Added the gem 'kinetic_sdk', '5.0.22' to dependencies --- Gemfile | 4 ++-- export.rb | 5 +++-- import.rb | 4 ++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/Gemfile b/Gemfile index e2f1483..dfc987e 100644 --- a/Gemfile +++ b/Gemfile @@ -1,5 +1,5 @@ source 'https://rubygems.org' -gem 'kinetic_sdk', '5.0.21' +gem 'kinetic_sdk', '5.0.22' gem 'rexml', '~> 3.2', '>= 3.2.5' -gem 'find', '~> 0.1.1' \ No newline at end of file +gem 'find', '~> 0.1.1' diff --git a/export.rb b/export.rb index 5da977c..6f4d3c8 100644 --- a/export.rb +++ b/export.rb @@ -196,7 +196,7 @@ def remove_discussion_id_attribute(model) # export submissions logger.info "Exporting and writing submission data" -(SUBMISSIONS_TO_EXPORT || []).each do |item| +(SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| is_datastore = item["datastore"] || false logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" # build directory to write files to @@ -302,7 +302,8 @@ def remove_discussion_id_attribute(model) task_sdk.export_categories() task_sdk.export_access_keys() - +# Export workflows as these are not the same as Trees and Routines +space_sdk.export_workflows() # ------------------------------------------------------------------------------ # complete diff --git a/import.rb b/import.rb index aef1182..7f595de 100644 --- a/import.rb +++ b/import.rb @@ -895,6 +895,10 @@ end } +# Import v6 workflows as these are not not the same as Trees and Routines +logger.info "Importing workflows" +space_sdk.import_workflows(vars["core"]["space_slug"]) + # ------------------------------------------------------------------------------ # complete # ------------------------------------------------------------------------------ From dae2c2f84197583cfee68faccc7b3a78909278fe Mon Sep 17 00:00:00 2001 From: Brian Peterson Date: Mon, 2 Oct 2023 11:11:57 -0500 Subject: [PATCH 06/19] Standalone bundle - better handling on whether configfile passed 
through or selecting one. Updated main export.rb with selection changes. Updated Gemfile --- Gemfile | 3 + Gemfile.lock | 15 +++- StandAlone bundle/export.rb | 155 ++++++++++++++++++++---------------- export-specific.rb | 85 ++++++++++++++++---- export.rb | 26 ++++-- 5 files changed, 190 insertions(+), 94 deletions(-) diff --git a/Gemfile b/Gemfile index dfc987e..6a6b214 100644 --- a/Gemfile +++ b/Gemfile @@ -3,3 +3,6 @@ source 'https://rubygems.org' gem 'kinetic_sdk', '5.0.22' gem 'rexml', '~> 3.2', '>= 3.2.5' gem 'find', '~> 0.1.1' +gem 'logger', '~> 1.5.2' +gem 'json', '~> 2.6.3' +gem 'optparse', '~> 0.3.1' diff --git a/Gemfile.lock b/Gemfile.lock index 21a61de..17eb94b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,14 +2,18 @@ GEM remote: https://rubygems.org/ specs: find (0.1.1) - kinetic_sdk (5.0.21) + json (2.6.3) + json (2.6.3-java) + kinetic_sdk (5.0.22) mime-types (>= 3.3.1) multipart-post (= 2.0.0) slugify (= 1.0.7) - mime-types (3.4.1) + logger (1.5.3) + mime-types (3.5.1) mime-types-data (~> 3.2015) - mime-types-data (3.2023.0218.1) + mime-types-data (3.2023.0808) multipart-post (2.0.0) + optparse (0.3.1) rexml (3.2.5) slugify (1.0.7) @@ -20,7 +24,10 @@ PLATFORMS DEPENDENCIES find (~> 0.1.1) - kinetic_sdk (= 5.0.21) + json (~> 2.6.3) + kinetic_sdk (= 5.0.22) + logger (~> 1.5.2) + optparse (~> 0.3.1) rexml (~> 3.2, >= 3.2.5) BUNDLED WITH diff --git a/StandAlone bundle/export.rb b/StandAlone bundle/export.rb index 00df24d..4d24933 100644 --- a/StandAlone bundle/export.rb +++ b/StandAlone bundle/export.rb @@ -61,7 +61,7 @@ begin logger = Logger.new("#{pwd}\\output.log") rescue - logger = Logger.new('C:\Users\travis.wiese\Source\repos\platform-template\StandAlone bundle\output.log') + logger = Logger.new('C:\temp\KD_export_output.log') #TODO - Ask for path of configs end logger.level = Logger::INFO @@ -72,85 +72,102 @@ logger.info "Base directory: #{pwd}" # Determine the Present Working Directory +configFile = nil - -#ARGV << '-h' if ARGV.empty? 
+ARGV << '-h' if ARGV.empty? # The options specified on the command line will be collected in *options*. -# options = {} -# OptionParser.new do |opts| -# opts.banner = "Usage: example.rb [options]" - -# opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| -# options["CONFIG_FILE"] = config -# end - - # No argument, shows at tail. This will print an options summary. - # Try it and see! -# opts.on_tail("-h", "--help", "Show this message") do -# puts opts -# exit -# end -# end.parse! +options = {} +OptionParser.new do |opts| + break if opts === nil + opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| + configFile = config + end +end.parse! # determine the directory paths -# platform_template_path = File.dirname(File.expand_path(__FILE__)) -platform_template_path = Dir.pwd -config_folder_path = File.join(platform_template_path,'config') - -if !File.directory?(config_folder_path) - logger.info "Config folder not found at #{config_folder_path}" - puts "Cannot find config folder!" - puts "Exiting..." - gets - exit -end - -# #Determine Config file to use -config_exts = ['.yaml','.yml'] -configArray = [] -logger.info "Checking #{config_folder_path} for config files" -begin - Find.find("#{config_folder_path}/") do |file| - configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) - end -rescue - logger.info "Error finding default config file path!" - puts "Cannot find config files in default path! (#{pwd})" - puts "Exiting script..." 
- $stdin.gets - exit -end -logger.info "Found config files" + # platform_template_path = File.dirname(File.expand_path(__FILE__)) + platform_template_path = Dir.pwd + config_folder_path = File.join(platform_template_path,'config') -puts "Select your config file" -configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" -end -logger.info "Sel section" -print "Selection: " -sel = $stdin.gets.chomp.to_i -begin - configFile = configArray[sel-1] - logger.info "Option #{sel} - #{configFile}" -rescue - logger.info "Error selecting config file!" - puts "Error selecting config file!" - puts "Exiting..." - gets - exit + #If no config passed through, run config selector +if configFile == nil + configFile = config_selection end +puts "config: #{configFile}" logger.info "Post selection" - - - # ------------------------------------------------------------------------------ # methods # ------------------------------------------------------------------------------ +#Selection method if not passing config file path +def config_selection + + + + #Ensure config folder exists + if !File.directory?(config_folder_path) + logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." + gets + exit + end + + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + logger.info "Checking #{config_folder_path} for config files" + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) + end + rescue + #No config files found in config folder + logger.info "Error finding default config file path!" + puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." 
+ gets + exit + end + logger.info "Found config files" + + + + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + puts "sel is #{sel}" + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + logger.info "Option #{sel} - #{configFile}" + break + rescue + logger.info "Error selecting config file!" + puts "Error selecting config file!" + puts "Exiting..." + gets + exit + end + end while true + return configFile +end + # Removes discussion id attribute from a given model def remove_discussion_id_attribute(model) if !model.is_a?(Array) @@ -178,7 +195,8 @@ def remove_discussion_id_attribute(model) vars = {} -# Read the config file specified in the command line into the variable "vars" +# Read the config file specified from the above selection + if File.file?(file = "#{config_folder_path}/#{configFile}") vars.merge!( YAML.load(File.read(file)) ) end @@ -258,7 +276,7 @@ def remove_discussion_id_attribute(model) # export submissions logger.info "Exporting and writing submission data" -(SUBMISSIONS_TO_EXPORT || []).each do |item| +(SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| is_datastore = item["datastore"] || false logger.info "Exporting - #{is_datastore ? 
'datastore' : 'kapp'} form #{item['formSlug']}" # build directory to write files to @@ -364,7 +382,8 @@ def remove_discussion_id_attribute(model) task_sdk.export_categories() task_sdk.export_access_keys() - +# Export workflows as these are not the same as Trees and Routines +space_sdk.export_workflows() # ------------------------------------------------------------------------------ # complete diff --git a/export-specific.rb b/export-specific.rb index da85643..2250675 100644 --- a/export-specific.rb +++ b/export-specific.rb @@ -216,11 +216,28 @@ def export_submissions(item) Dir.chdir(platform_template_path) { system("bundle", "install") } vars = {} +file = "#{platform_template_path}/#{options['CONFIG_FILE']}" + +# Check if configuration file exists +$logger.info "Validating configuration file." +begin + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist." + end +rescue => error + $logger.info error + exit +end # Read the config file specified in the command line into the variable "vars" -if File.file?(file = "#{platform_template_path}/#{options['CONFIG_FILE']}") +begin vars.merge!( YAML.load(File.read(file)) ) +rescue => error + $logger.info "Error loading YAML configuration" + $logger.info error + exit end +$logger.info "Configuration file passed validation." # Set http_options based on values provided in the config file. http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| @@ -233,16 +250,9 @@ def export_submissions(item) end # Output the yml file config -$logger.info JSON.pretty_generate(vars) +$logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ - -$logger.info "Removing files and folders from the existing \"#{template_name}\" template." 
-FileUtils.rm_rf Dir.glob("#{$core_path}/*") - -$logger.info "Setting up the Core SDK" +$logger.info "Setting up the SDK" $space_sdk = KineticSdk::Core.new({ space_server_url: vars["core"]["server_url"], @@ -252,6 +262,54 @@ def export_submissions(item) options: http_options.merge({ export_directory: "#{$core_path}" }) }) +task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{$task_path}" }) +}) + +# ------------------------------------------------------------------------------ +# Validate connection and Credentials to Server +# ------------------------------------------------------------------------------ + +# Validate Core Connection +begin + $logger.info "Validating connection to Core \"#{$space_sdk.api_url}\"" + response = $space_sdk.me() + if response.status == 0 + raise response.message + elsif response.status.to_s.match(/4\d{2}/) + raise response.content['error'] + end +rescue => error + $logger.info error + exit +end + +# Validate Task Connection +begin + $logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" + response = task_sdk.environment() + if response.status == 0 + raise response.message + elsif response.status.to_s.match(/4\d{2}/) + raise response.content['error'] + end +rescue => error + $logger.info error + exit +end + +$logger.info "Validating connection to Cors and Task was Successful" + +# ------------------------------------------------------------------------------ +# core +# ------------------------------------------------------------------------------ + +$logger.info "Removing files and folders from the existing \"#{template_name}\" template." +FileUtils.rm_rf Dir.glob("#{$core_path}/*") + # fetch export from core service and write to export directory $logger.info "Exporting the core components." 
$logger.info " exporting with api: #{$space_sdk.api_url}" @@ -403,13 +461,6 @@ def export_submissions(item) $logger.info "Setting up the Task SDK" -task_sdk = KineticSdk::Task.new({ - app_server_url: "#{vars["task"]["server_url"]}", - username: vars["task"]["service_user_username"], - password: vars["task"]["service_user_password"], - options: http_options.merge({ export_directory: "#{$task_path}" }) -}) - if vars['options'] && vars['options']['EXPORT'] && vars['options']['EXPORT']['workflow'] # Export Trees (vars['options']['EXPORT']['workflow']['trees'] || []).compact.each{ |tree_name| diff --git a/export.rb b/export.rb index 6f4d3c8..2c7e458 100644 --- a/export.rb +++ b/export.rb @@ -115,11 +115,28 @@ def remove_discussion_id_attribute(model) Dir.chdir(platform_template_path) { system("bundle", "install") } vars = {} +file = "#{platform_template_path}/#{options['CONFIG_FILE']}" + +# Check if configuration file exists +logger.info "Validating configuration file." +begin + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist." + end +rescue => error + logger.info error + exit +end # Read the config file specified in the command line into the variable "vars" -if File.file?(file = "#{platform_template_path}/#{options['CONFIG_FILE']}") +begin vars.merge!( YAML.load(File.read(file)) ) +rescue => error + logger.info "Error loading YAML configuration" + logger.info error + exit end +logger.info "Configuration file passed validation." # Set http_options based on values provided in the config file. 
http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| @@ -130,9 +147,8 @@ def remove_discussion_id_attribute(model) SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ +# Output the yml file config +logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" #Setting core paths core_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "core") @@ -149,7 +165,6 @@ def remove_discussion_id_attribute(model) }) - logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{core_path}/*") @@ -295,6 +310,7 @@ def remove_discussion_id_attribute(model) task_sdk.export_tree(tree['title']) end end + task_sdk.export_routines() task_sdk.export_handlers() task_sdk.export_groups() From fcfeb8194131241ce10ec32cb8da9b4d45e0b9f8 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Fri, 27 Oct 2023 08:54:56 -0500 Subject: [PATCH 07/19] Update with latest changes, template selection, B64 encoding --- .gitignore | 9 +-- Gemfile.lock | 13 +--- export.rb | 182 +++++++++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 177 insertions(+), 27 deletions(-) diff --git a/.gitignore b/.gitignore index c55caf7..7b7e94f 100644 --- a/.gitignore +++ b/.gitignore @@ -3,11 +3,4 @@ config/*.yml !config/servername_environment_import_config.yml !config/servername_environment_export_config.yml !config/servername_environment_export_specific_config.yml -exports/* -StandAlone bundle/exports/* -StandAlone bundle/config/*.yml -!StandAlone bundle/config/servername_environment_import_config.yml -!StandAlone bundle/config/servername_environment_export_config.yml -!StandAlone 
bundle/config/servername_environment_export_specific_config.yml -*.exe -*.log \ No newline at end of file +exports/* \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 17eb94b..6482959 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,20 +1,14 @@ GEM remote: https://rubygems.org/ specs: - find (0.1.1) - json (2.6.3) - json (2.6.3-java) kinetic_sdk (5.0.22) mime-types (>= 3.3.1) multipart-post (= 2.0.0) slugify (= 1.0.7) - logger (1.5.3) mime-types (3.5.1) mime-types-data (~> 3.2015) - mime-types-data (3.2023.0808) + mime-types-data (3.2023.1003) multipart-post (2.0.0) - optparse (0.3.1) - rexml (3.2.5) slugify (1.0.7) PLATFORMS @@ -23,12 +17,7 @@ PLATFORMS x64-mingw32 DEPENDENCIES - find (~> 0.1.1) - json (~> 2.6.3) kinetic_sdk (= 5.0.22) - logger (~> 1.5.2) - optparse (~> 0.3.1) - rexml (~> 3.2, >= 3.2.5) BUNDLED WITH 2.2.5 diff --git a/export.rb b/export.rb index 2c7e458..a8eabec 100644 --- a/export.rb +++ b/export.rb @@ -44,10 +44,13 @@ =end -require 'logger' +require 'logger' #For System Logging require 'json' -require 'optparse' -require 'kinetic_sdk' +require 'optparse' #For argument parsing +require 'kinetic_sdk' +require 'Find' #For config list building +require 'io/console' #For password request +require 'base64' #For pwd encoding template_name = "platform-template" @@ -62,28 +65,97 @@ # Determine the Present Working Directory pwd = File.expand_path(File.dirname(__FILE__)) -ARGV << '-h' if ARGV.empty? # The options specified on the command line will be collected in *options*. options = {} OptionParser.new do |opts| + break if opts === '' #If no arguments provided, then break out and move to selection + #TODO - See if this can be more elegant or if this is the best we have opts.banner = "Usage: example.rb [options]" - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| options["CONFIG_FILE"] = config end # No argument, shows at tail. This will print an options summary. # Try it and see! 
- opts.on_tail("-h", "--help", "Show this message") do + opts.on("-h", "--help", "Show this message") do puts opts exit end + end.parse! + +#Configuration Selection +def config_selection(config_folder_path, logger) + + #Ensure config folder exists + if !File.directory?(config_folder_path) + logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." + gets + exit + end + + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + logger.info "Checking #{config_folder_path} for config files" + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) + end + rescue + #No config files found in config folder + logger.info "Error finding default config file path!" + puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." + gets + exit + end + logger.info "Found config files" + + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + logger.info "Option #{sel} - #{configFile}" + break + rescue + logger.info "Error selecting config file! Exiting..." + puts "Error selecting config file!" + puts "Exiting..." + gets + exit + end + end while true + return configFile +end + + +#End method + # determine the directory paths platform_template_path = File.dirname(File.expand_path(__FILE__)) +config_folder_path = File.join(platform_template_path,'config') +if options["CONFIG_FILE"].nil? 
+ options["CONFIG_FILE"] = config_selection(config_folder_path, logger) +end # ------------------------------------------------------------------------------ # methods @@ -121,7 +193,10 @@ def remove_discussion_id_attribute(model) logger.info "Validating configuration file." begin if File.exist?(file) != true - raise "The file \"#{options['CONFIG_FILE']}\" does not exist." + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end end rescue => error logger.info error @@ -138,6 +213,31 @@ def remove_discussion_id_attribute(model) end logger.info "Configuration file passed validation." +def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if vars[pwdAttribute]["service_user_password"].nil? + password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password =vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + File.open(file, 'w') {|f| f.write vars.to_yaml} + return password +end + +#Setup secure pwd function - Checks for nil and prompts for pwd, then b64 encodes and writes to yml +if !vars["core"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] + vars["core"]["service_user_password"] = SecurePWD(file,vars,"core") +end +if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] + vars["task"]["service_user_password"] = SecurePWD(file,vars,"task") +end + +#Write PT pwds into local variable +vars["core"]["service_user_password"] = Base64.decode64(vars["core"]["service_user_password"]) 
+vars["task"]["service_user_password"] = Base64.decode64(vars["task"]["service_user_password"]) + # Set http_options based on values provided in the config file. http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| result[k.to_sym] = v @@ -147,6 +247,29 @@ def remove_discussion_id_attribute(model) SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] +#Config exports folder exists, if not then create +if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) +end + +#Setting core paths utilzing variables +if !vars['core']['space_slug'].nil? + folderName = vars['core']['space_slug'] +elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] +else + puts "No space slug or name provided! Please provide one in order to export..." + gets + exit +end +core_path = File.join(platform_template_path, "exports", folderName, "core") +task_path = File.join(platform_template_path, "exports", folderName, "task") + +puts "Core #{core_path}" +puts "Task #{task_path}" +gets +exit + # Output the yml file config logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" @@ -164,6 +287,51 @@ def remove_discussion_id_attribute(model) options: http_options.merge({ export_directory: "#{core_path}" }) }) +task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{task_path}" }) +}) + +# ------------------------------------------------------------------------------ +# Validate connection and Credentials to Server +# ------------------------------------------------------------------------------ + +# Validate Core Connection +begin + logger.info "Validating connection to Core \"#{space_sdk.api_url}\"" + 
response = space_sdk.me() + if response.status == 0 + raise response.message + elsif response.status.to_s.match(/4\d{2}/) + raise response.content['error'] + end +rescue => error + logger.info error + exit +end + +# Validate Task Connection +begin + logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" + response = task_sdk.environment() + if response.status == 0 + raise response.message + elsif response.status.to_s.match(/4\d{2}/) + raise response.content['error'] + end +rescue => error + logger.info error + exit +end + +logger.info "Validating connection to Cors and Task was Successful" + +# ------------------------------------------------------------------------------ +# core +# ------------------------------------------------------------------------------ + logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{core_path}/*") From a702a59b4d2e30bf7f96d51ef38893d80cc8d550 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Fri, 27 Oct 2023 09:45:35 -0500 Subject: [PATCH 08/19] Template selection, B64 encoding --- export.rb | 54 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 53 insertions(+), 1 deletion(-) diff --git a/export.rb b/export.rb index 63a67d3..6686076 100644 --- a/export.rb +++ b/export.rb @@ -193,7 +193,10 @@ def remove_discussion_id_attribute(model) logger.info "Validating configuration file." begin if File.exist?(file) != true - raise "The file \"#{options['CONFIG_FILE']}\" does not exist." + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end end rescue => error logger.info error @@ -210,6 +213,31 @@ def remove_discussion_id_attribute(model) end logger.info "Configuration file passed validation." 
+def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if vars[pwdAttribute]["service_user_password"].nil? + password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password =vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + File.open(file, 'w') {|f| f.write vars.to_yaml} + return password +end + +#Setup secure pwd function - Checks for nil and prompts for pwd, then b64 encodes and writes to yml +if !vars["core"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] + vars["core"]["service_user_password"] = SecurePWD(file,vars,"core") +end +if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] + vars["task"]["service_user_password"] = SecurePWD(file,vars,"task") +end + +#Write PT pwds into local variable +vars["core"]["service_user_password"] = Base64.decode64(vars["core"]["service_user_password"]) +vars["task"]["service_user_password"] = Base64.decode64(vars["task"]["service_user_password"]) + # Set http_options based on values provided in the config file. http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| result[k.to_sym] = v @@ -219,6 +247,29 @@ def remove_discussion_id_attribute(model) SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] +#Config exports folder exists, if not then create +if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) +end + +#Setting core paths utilzing variables +if !vars['core']['space_slug'].nil? 
+ folderName = vars['core']['space_slug'] +elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] +else + puts "No space slug or name provided! Please provide one in order to export..." + gets + exit +end +core_path = File.join(platform_template_path, "exports", folderName, "core") +task_path = File.join(platform_template_path, "exports", folderName, "task") + +puts "Core #{core_path}" +puts "Task #{task_path}" +gets +exit + # Output the yml file config logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" @@ -277,6 +328,7 @@ def remove_discussion_id_attribute(model) # core # ------------------------------------------------------------------------------ + logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{core_path}/*") From 832b649bc24e94747d5452fcdefef41631538955 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Tue, 2 Jan 2024 14:33:19 -0600 Subject: [PATCH 09/19] Removed standalone files, updated import to decode pwd, corrected bug with writing encoded pwd --- .gitignore | 4 +- StandAlone bundle/Compare_and_sync_spaces.ps1 | 36 - StandAlone bundle/Run Compare and Sync.cmd | 1 - StandAlone bundle/export.rb | 392 -------- StandAlone bundle/fiber.so | Bin 12800 -> 0 bytes StandAlone bundle/import.rb | 934 ------------------ .../servername_environment_import_config.yml | 1 + export.rb | 23 +- import.rb | 8 + 9 files changed, 27 insertions(+), 1372 deletions(-) delete mode 100644 StandAlone bundle/Compare_and_sync_spaces.ps1 delete mode 100644 StandAlone bundle/Run Compare and Sync.cmd delete mode 100644 StandAlone bundle/export.rb delete mode 100644 StandAlone bundle/fiber.so delete mode 100644 StandAlone bundle/import.rb diff --git a/.gitignore b/.gitignore index 7b7e94f..a6af917 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,6 @@ config/*.yml !config/servername_environment_import_config.yml !config/servername_environment_export_config.yml 
!config/servername_environment_export_specific_config.yml -exports/* \ No newline at end of file +exports/* +Local_Gems/* +Tools/* \ No newline at end of file diff --git a/StandAlone bundle/Compare_and_sync_spaces.ps1 b/StandAlone bundle/Compare_and_sync_spaces.ps1 deleted file mode 100644 index 401c6f0..0000000 --- a/StandAlone bundle/Compare_and_sync_spaces.ps1 +++ /dev/null @@ -1,36 +0,0 @@ -#Comparison script to see delta in OldSpace and NewSpace for syncing changes -#Written by Travis Wiese - 6/27/2023 - - -Write-Output "Select Option -1) Export Space -2) Import Space -3) Exit" -do{ - - $selection = read-host "Selection" - switch($selection) - { - 1 { - $ExportConfigList = Get-childitem -Path "Config/*" -Filter "*export*" -include *.yml,*.yaml - for($x=1;$x -ne $ExportConfigList.Length;$x++){Write-Output "$x) $($ExportConfigList[$x].Name)"} - $Opt = Read-host "Select config" - ruby './export.rb' -c "Config/$($ExportConfigList[$opt].Name)" - } - 2 { - $ImportConfigList = Get-childitem -Path "Config/*" -Filter "*import*" -include *.yml,*.yaml - for($x=1;$x -ne $ImportConfigList.Length;$x++){Write-Output "$x) $($ImportConfigList[$x].Name)"} - $Opt = Read-host "Select config" - # $config_file_path = read-host "Provide relative filepath for configuration(ex: config/export_config_Bluestone.yml)" - ruby './import.rb' -c "Config/$($ImportConfigList[$opt].Name)" - } - 3 {Exit} - default { - Write-Output $("Select Option - 1) Export Space - 2) Import Space - 3) Exit" -replace "(?m)^\s+") - } - } -}while($true) - diff --git a/StandAlone bundle/Run Compare and Sync.cmd b/StandAlone bundle/Run Compare and Sync.cmd deleted file mode 100644 index 2775bdb..0000000 --- a/StandAlone bundle/Run Compare and Sync.cmd +++ /dev/null @@ -1 +0,0 @@ -Powershell.exe -ExecutionPolicy Bypass -command ".\Compare_and_sync_spaces.ps1" \ No newline at end of file diff --git a/StandAlone bundle/export.rb b/StandAlone bundle/export.rb deleted file mode 100644 index 4d24933..0000000 --- 
a/StandAlone bundle/export.rb +++ /dev/null @@ -1,392 +0,0 @@ -# RUNNING THE SCRIPT: -# ruby export.rb -c "<>" -# ruby export.rb -c "config/foo-web-server.rb" -# -# Example Config File Values (See Readme for additional details) -# -# -=begin yml config file example - - --- - core: - # server_url: https://.kinops.io OR https://.com/kinetic/ - server_url: https://web-server.com - space_slug: - space_name: - service_user_username: - service_user_password: - options: - SUBMISSIONS_TO_EXPORT: - - datastore: true - formSlug: - - REMOVE_DATA_PROPERTIES: - - createdAt - - createdBy - - updatedAt - - updatedBy - - closedAt - - closedBy - - submittedAt - - submittedBy - - id - - authStrategy - - key - - handle - task: - # server_url: https://.kinops.io/app/components/task OR https://.com/kinetic-task - server_url: https://web-server.com - service_user_username: - service_user_password: - http_options: - log_level: info - log_output: stderr - -=end - - -#Export_Command ocra export.rb --no-dep-run --add-all-core --gem-files C:\Ruby32-x64\bin\ruby_builtin_dlls --gem-scripts C:\Ruby32-x64\lib\ruby\gems\3.2.0\gems\kinetic_sdk-5.0.21 --gemfile ../Gemfile -require 'logger' -require 'json' -require 'optparse' -require 'kinetic_sdk' -require 'Find' -require 'rexml' - -template_name = "platform-template" -# pwd = File.expand_path(File.dirname(__FILE__)) -# pwd = File.path('C:\Users\travis.wiese\Source\repos\platform-template\StandAlone bundle') -pwd = Dir.pwd -# logger = Logger.new(STDERR) -begin - logger = Logger.new("#{pwd}\\output.log") -rescue - logger = Logger.new('C:\temp\KD_export_output.log') - #TODO - Ask for path of configs -end -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| - date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") - "[#{date_format}] #{severity}: #{msg}\n" -end - -logger.info "Base directory: #{pwd}" -# Determine the Present Working Directory -configFile = nil - -ARGV << '-h' if ARGV.empty? 
-# The options specified on the command line will be collected in *options*. -options = {} -OptionParser.new do |opts| - break if opts === nil - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| - configFile = config - end - -end.parse! - - -# determine the directory paths - # platform_template_path = File.dirname(File.expand_path(__FILE__)) - platform_template_path = Dir.pwd - config_folder_path = File.join(platform_template_path,'config') - - #If no config passed through, run config selector -if configFile == nil - configFile = config_selection -end -puts "config: #{configFile}" - - logger.info "Post selection" - -# ------------------------------------------------------------------------------ -# methods -# ------------------------------------------------------------------------------ - -#Selection method if not passing config file path -def config_selection - - - - #Ensure config folder exists - if !File.directory?(config_folder_path) - logger.info "Config folder not found at #{config_folder_path}" - puts "Cannot find config folder!" - puts "Exiting..." - gets - exit - end - - # #Determine Config file to use - config_exts = ['.yaml','.yml'] - configArray = [] - logger.info "Checking #{config_folder_path} for config files" - begin - Find.find("#{config_folder_path}/") do |file| - configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) - end - rescue - #No config files found in config folder - logger.info "Error finding default config file path!" - puts "Cannot find config files in default path! (#{pwd})" - puts "Exiting script..." 
- gets - exit - end - logger.info "Found config files" - - - - #Print config file options with number indicators to select - puts "Select your config file" - configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" - end - logger.info "Select section" - begin - print "Selection (0 to repeat options): " - sel = gets.chomp.to_i - begin - puts "sel is #{sel}" - if sel === 0 - configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" - end - next - end - configFile = configArray[sel-1] - logger.info "Option #{sel} - #{configFile}" - break - rescue - logger.info "Error selecting config file!" - puts "Error selecting config file!" - puts "Exiting..." - gets - exit - end - end while true - return configFile -end - -# Removes discussion id attribute from a given model -def remove_discussion_id_attribute(model) - if !model.is_a?(Array) - if model.has_key?("attributes") - scrubbed = model["attributes"].select do |attribute| - attribute["name"] != "Discussion Id" - end - end - model["attributes"] = scrubbed - end - return model -end - -# ------------------------------------------------------------------------------ -# constants -# ------------------------------------------------------------------------------ - - -# ------------------------------------------------------------------------------ -# setup -# ------------------------------------------------------------------------------ - -logger.info "Installing gems for the \"#{template_name}\" template." -Dir.chdir(platform_template_path) { system("bundle", "install") } - -vars = {} - -# Read the config file specified from the above selection - -if File.file?(file = "#{config_folder_path}/#{configFile}") - vars.merge!( YAML.load(File.read(file)) ) -end - -# Set http_options based on values provided in the config file. -http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| - result[k.to_sym] = v -end - -# Set variables based on values provided in the config file. 
-SUBMISSIONS_TO_EXPORT = vars["options"]["SUBMISSIONS_TO_EXPORT"] -REMOVE_DATA_PROPERTIES = vars["options"]["REMOVE_DATA_PROPERTIES"] - -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ - -#Setting core paths -core_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "core") -task_path = File.join(platform_template_path, "exports", vars['core']['space_slug'], "task") - -logger.info "Setting up the Core SDK" - -space_sdk = KineticSdk::Core.new({ - space_server_url: vars["core"]["server_url"], - space_slug: vars["core"]["space_slug"], - username: vars["core"]["service_user_username"], - password: vars["core"]["service_user_password"], - options: http_options.merge({ export_directory: "#{core_path}" }) -}) - - - -logger.info "Removing files and folders from the existing \"#{template_name}\" template." -FileUtils.rm_rf Dir.glob("#{core_path}/*") - -# fetch export from core service and write to export directory -logger.info "Exporting the core components for the \"#{template_name}\" template." 
-logger.info " exporting with api: #{space_sdk.api_url}" -logger.info " - exporting configuration data (Kapps,forms, etc)" -space_sdk.export_space - -# cleanup properties that should not be committed with export -# bridge keys -Dir["#{core_path}/space/bridges/*.json"].each do |filename| - bridge = JSON.parse(File.read(filename)) - if bridge.has_key?("key") - bridge.delete("key") - File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(bridge)) } - end -end - -# cleanup space -filename = "#{core_path}/space.json" -space = JSON.parse(File.read(filename)) -# filestore key -if space.has_key?("filestore") && space["filestore"].has_key?("key") - space["filestore"].delete("key") -end -# platform components -if space.has_key?("platformComponents") - if space["platformComponents"].has_key?("task") - space["platformComponents"].delete("task") - end - (space["platformComponents"]["agents"] || []).each_with_index do |agent,idx| - space["platformComponents"]["agents"][idx]["url"] = "" - end -end -# rewrite the space file -File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(space)) } - -# cleanup discussion ids -Dir["#{core_path}/**/*.json"].each do |filename| - model = remove_discussion_id_attribute(JSON.parse(File.read(filename))) - File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(model)) } -end - -# export submissions -logger.info "Exporting and writing submission data" -(SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| - is_datastore = item["datastore"] || false - logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" - # build directory to write files to - submission_path = is_datastore ? - "#{core_path}/space/datastore/forms/#{item['formSlug']}" : - "#{core_path}/space/kapps/#{item['kappSlug']}/forms/#{item['formSlug']}" - - # get attachment fields from form definition - attachment_form = is_datastore ? 
- space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : - space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) - - # get attachment fields from form definition - attachement_files = attachment_form.status == 200 ? attachment_form.content['form']['fields'].select{ | file | file['dataType'] == "file" }.map { | field | field['name'] } : {} - - # set base url for attachments - attachment_base_url = is_datastore ? - "#{space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : - "#{space_sdk.api_url.gsub("/app/api/v1", "")}" - - # create folder to write submission data to - FileUtils.mkdir_p(submission_path, :mode => 0700) - - # build params to pass to the retrieve_form_submissions method - params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} - - # open the submissions file in write mode - file = File.open("#{submission_path}/submissions.ndjson", 'w'); - - # ensure the file is empty - file.truncate(0) - response = nil - begin - # get submissions from datastore form or form - response = is_datastore ? 
- space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : - space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content - if response.has_key?("submissions") - # iterate over each submission - (response["submissions"] || []).each do |submission| - # write each attachment to a a dir - submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| - submission_id = submission['id'] - # define the dir to contain the attahment - download_dir = "#{submission_path}/#{submission_id}/#{field}" - # evaluate fields with multiple attachments - value.map.with_index{ | attachment, index | - # create folder to write attachment - FileUtils.mkdir_p(download_dir, :mode => 0700) - # dir and file name to write attachment - download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" - # url to retrieve the attachment - url = URI.escape("#{attachment_base_url}/submissions/#{submission_id}/files/#{field}/#{index}/#{attachment['name']}") - # retrieve and write attachment - space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) - # add the "path" key to indicate the attachment's location - attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" - } - } - # append each submission (removing the submission unwanted attributes) - file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) - end - end - params['pageToken'] = response['nextPageToken'] - # get next page of submissions if there are more - end while !response.nil? && !response['nextPageToken'].nil? 
- # close the submissions file - file.close() -end -logger.info " - submission data export complete" - -# ------------------------------------------------------------------------------ -# task -# ------------------------------------------------------------------------------ - - -task_sdk = KineticSdk::Task.new({ - app_server_url: "#{vars["task"]["server_url"]}", - username: vars["task"]["service_user_username"], - password: vars["task"]["service_user_password"], - options: http_options.merge({ export_directory: "#{task_path}" }) -}) - - -logger.info "Removing files and folders from the existing \"#{template_name}\" template." -FileUtils.rm_rf Dir.glob("#{task_path}/*") - -logger.info "Exporting the task components for the \"#{template_name}\" template." -logger.info " exporting with api: #{task_sdk.api_url}" - -# export all sources, trees, routines, handlers, -# groups, policy rules, categories, and access keys -task_sdk.export_sources() -task_sdk.find_sources().content['sourceRoots'].each do |source| - task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| - task_sdk.export_tree(tree['title']) - end -end -task_sdk.export_routines() -task_sdk.export_handlers() -task_sdk.export_groups() -task_sdk.export_policy_rules() -task_sdk.export_categories() -task_sdk.export_access_keys() - -# Export workflows as these are not the same as Trees and Routines -space_sdk.export_workflows() - -# ------------------------------------------------------------------------------ -# complete -# ------------------------------------------------------------------------------ - -logger.info "Finished exporting the \"#{template_name}\" template." diff --git a/StandAlone bundle/fiber.so b/StandAlone bundle/fiber.so deleted file mode 100644 index 08ab26a531b90dec0047c6b88eeaf8a1ae2b24b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12800 zcmeHN4Rl+@l^$7=tt56Nh)oSNo0s5#(^Q8JUNoHh>nNTtr#&!eJ<5J(B{OU#X%sG2! 
zvL~j!ICr|%#z zAJHF2AwG(+oJZx01UtG~^|?-=k@y@rOhBQxHlOnOtMNq1ADb9U@3%2#o#SHemF~us zH?C^nv3DxH(hK#{v++aGZF1Q=V5&;R58bvt_FDs&XFpNJSY6feXK0$GvR|N;%Zq;5 z%h*`4b&iLzJqi|4<`thbI?k;>y@E>zcsvzNb9vd_=qVS)P83-C-bLpco# z#bf&`*RF56*}cwv^FHew7CULM#vemNkjtj|Gyzb48ANb7Y^lmX>6%_BEw|3;LmSA> z1J*vQ{j338Iza|pLfVfJ|B~sXzc6;fpy`(j<0)(3!|0(C_X3S)tbN}iX|h>{PA)U_ z7fb3TWiQ2nPx^IcBxZhXJ!6KCvl`<+o!qYa1^$-MO=-h`hl(JJ@jEY3nbU?$CX-Yh znqCQ1kLZs}#8v=4K?UN(>443DJ5C5qKdt%s`)`J}W` z4H>9-q@z?T&8@km1Io|-E0clGjdfKYdZh88LMY2%a)q_)X=>f;_i5u(p2C85Cp;CI z30at$p0Lub_7_tVWaXzN;FJ!)l8x@zDMK`@JciMueJ?qIjQGbaNAw<$H8W+4M=irqjP1g8jK*tV{DsB4H%3_T@Euf{n-JT6U_}{$M z4xhs!xcG;E%4`Uf9X-bVbq|E%t-x47+zeR<# zPNB2z%M(#LPmQz82&fkmHQ~2#*=6BUnLt%q2eim^|7$B9QPaJMBos&ku@f2589yir z(r_$o;E68}@&&^@Ar77(`!ch5Y|O%)???Xt({F>-+KNqIpbh|ggf_39gv%feTpe3F zh?z-6$H#u6(Pi_aYt?8f#OlU688SbBI!$2u>yVJ7@x)FV#V^GViL)ja5gDtkZ$(Bz zo1`bR_M0EVW*@655DSOYtvYtHK%9lmL%eo^c`afN{+@l4OkP=X0|Doo;=gO_QZ~{O zFM!`-`!K#Yqrc1RIH7+=u6`0ZIr-$Ds#N|Ax*KULt-)5x5#+1)Q2cPIv0}AP@>B$T zlCNR|2Ue;4TQQ=0_xWg6!x_#U_zFetfb!mSnwfPKm1<|@Q843JE&F-v?D!h$|3dtT_1ovb!H9<*P^u2{SgL%&pIo;2oJV^~eF%{{6rW+x>wMYz ztvg{zY`_5CO6Tj**RkU%bFp1%`?$65Z5oDbz7PTtArJC_V@7T`%%!lPMZQvj^M-tF zF;6sB2=3k3iWoNxpvMO{kq}>bd8X{WZ9r-By&bKxr^1u)r(#E+5iJS-so2qvqNOqO z0Jvepen0IR2pAVb-r%yYLf~>^MU8tmHV?Mjg_p-#m*U3R-&4h3WARF_bE(0WDeHq0 zg!~hkvNfbGQF3en64s#7hLx$#*AaCI!?HE#DzCDD{S-z5m&`u`3E7n|ILu=wif(%V z?-|zbEP`a&2&cyHCv3Ti(7On2u&Al@Ux6dSi+#>x*4TQiIG?;|H*|P^4*j?RV4tfZ zzgTyj!=?AU(ujBaFp>KXd98eeFBtS1UQ_-KJE!NaD|I>Dt+G+>Hd%KyAbJ#^;XizG zP3)v#?NcDpdgoreV<#=*Phux+)_4{4Tc6C44dLx^O*qWb604!6YD4{b&nf`G_wOJf*K+^FM$ALFkcL9VKfn`)yq2mU<-XS7Sp6qkWZE zEDM9_n}^cc^ke^#XCcRJgrlT^8P`~>@fS!s3`y?-qMkoo*7!Fu1TK@&x_u9vqPVy+ zeG*LizJ}J9eA9j(Kgg}#mnJzoz=?sRM^texruzLY49_Poap4>b$9d$hLL+?lp~q-P zPo+lfW2eE7rj2ku;_se|5HXK^q~e_Lhkzq5xEuAu8hA!lVFt@K_K>l&A3}x4kBHSc za`mXBC947C8JX9-(*De=SQInsVj}YeSFd8xR$z-r7;jy^d$*FG5$< z&zd$tHeUk{>6ct0LDRvC#NLGcb#FO*wFi^T6fecMw2!vcr4klKiGzenVN0g$z-C~w 
zIkOTVQBtnbXTL`^X*m|{Gib)aM;96!&W!yqyZ_Q|3hgg~S9%_*tvVkzhh8WpaRC5%BAb1s>d@FsJ^#X>^Ra!(2IpZeyKT`ij%0HomuhWF0w)}ZxI-0DGie6I zYxxH&&x^*fS9E=<^!Bh!IdeyggBOhBj@k|6#0F- zvwls~%3H5r$_+1VchU`Bv}FBH?OV%d6j;BT0?EaAt9vPS5uPZuSf8vd#qL#5Zhi8G zLiox-H;F{)eZGWmq%!MYnHS@TNeft*J6m?mj^q2Y)Z8n(T!nt2`#}t@g z>h&hX{grfcVO|8i*ZF~Xk$X4xVYu~Il={{~6XpOX-BB$nk&U!db zOK~XJetG^pw{PD@9vz=LsTb*%Bk|LuvJYJs3D{$>0-xn{oWxvm=SWmhO~$xjuk&N< zozREFqZ&t=KA+W-BFf~M3;q8%@AUCb^q(BBW-nhbFxBKYa-ID#el8`Wz0QH?F?cmA z4}2Bw{~U1-D07H3v3`8=?2u~DK{)UVih%mo@?sDhw9*Q2V zt2#((z~?g@9-9uR`Mwlpn;RgIXRpN%-u@d8)?B>-N4#MiU5^~gKfhk@=ga5= z8Bh2s$|L9U?P;|Zo5uJe6;oS#-iBW}h^ueP@19h*vK-pfdS?EbuWR?2t95v}4(I9c zY#kQp@b_Vj|Bwy`RX7pFnw0%#WLDwc} zkWS14CyjjIsEJFjD8(caRLtoLZul*iaBd!_qw3frYJS~IiuVYleHu)qcCen$6G=)V zNRLq1U{gN`sACzEG*h2O4UnUucXEg3%{m$mR7GKD0u2(hAi{+L(rr zwxRu2yy$#TLH9kc^NpTaT)QH@OIIY7wE1_mH?BeE$|h@Ui}~pyspd!Q)$}P2DJoIw zRXt&V3>+Eu;T~JtrOoHrIxNuP(N2y2oDTQu@KGIpUx#<=aGMS{>Trz;(+`g4r7)U* zdPe8z-;rsscr+?~0smq>2&%ltcEKn6h3>Dp&*)s8Uxy7mmsOQqf^dpcM+7uMpn?oJZNbquev%uqSYl{TC zyM_5-VMBAb(Agz~HZ|W8T+|(G6+>N}Ob}LuB4V_;=v3up?N}zu6DAYKhh;aR<>tPNZcT_;zuaKv1LB4kb$M zX3|YRWQkp^T^+*aV5FM}^V{s2)t}t2B_s;np>4rj{;XXNN50?2r>qa#iv0Geb>_-% z#}5^+KUG8tTCB ze_Mt6hMO9y$(b0#5Ro$0B50m06YNLj&cHc7;rPnDc~c3e?Vc)D0A^Y6ciKiQNZi*QK1xg0PuDcf#`t0 zL8%1(zkLkoXecgX#kCekQJ^qv_M1`#BSb4?#nmb;Wg00MRr!cFx0npfR0~?6krmb! 
zI1B;3{ePkt8Cg+_(NT~BpT)#1_n92VKtak-HhoE%(biJzm=TyBo|Y;aF^?L@4SvHk zGn-axc9;Uju-d1{#EPoXKVS$O%mvI`ZE#G$tX(9xT9=!peHyfwnWa|k6E^sD+p;pU zdLZMk^*Q#@TLBtdb+0~uz-gWtH6ovRK9?Tw zCnwGJ-H(0cUhKCyv_8;kKzlrgHVj%XXdm~C>+};-+RN|LxE(s=I$W(oQHS^F@cTOa zu@1Akp4RKn>F}@)kL&Oq9e$|8Kk3i{o9VIX@H`z>=+Lgi8Xaof*t`GA#&d)57V>&LCsw8$7{|pcq^p35lWB=8gvaKK?aBU8fj~ zOeM3;1y=<{Uvsxu7m0L5@Efa-% z?^d?h*c9zlu{oyZmad4%=9(}x^?!a|Q+p(caj!D9_h87t9x_FOTH~{(ZZXmk#P`Hu z!a7?wh1ol%&F$exs8ei5q|n+x*>dUPMVq=ew?@Q8D^sM%fN$;295`@xWiM2L#;zVBLwG zcsbN78ngAg`gy;s!WT1}?pj_nza_0Gq_P~DwwGkSC diff --git a/StandAlone bundle/import.rb b/StandAlone bundle/import.rb deleted file mode 100644 index 03df699..0000000 --- a/StandAlone bundle/import.rb +++ /dev/null @@ -1,934 +0,0 @@ -# NOTES -# This is a migration tool not an installation tool. There are certain expectations that the destination is configured and working. -# Agent Server(s) must be added ahead of migration. /space/settings/platformComponents/agents -# Task Server must be added ahead of migration. /space/settings/platformComponents/task -# Task Sources must be manually maintained -# Bridges must be added ahead of migration. /space/plugins/bridges -# Agent Handlers are not migrated by design. They intentionally must be manually added. -# Teams are not deleted from destination. It could be too dangerous to delete them. 
- -# TODO - -# RUNNING THE SCRIPT: -# ruby import_script.rb -c "<>" -# ruby import_script -c "config/foo-web-server.rb" -# -# Example Config File Values (See Readme for additional details) -# -=begin yml config file example - --- - core: - # server_url: https://.kinops.io OR https://.com/kinetic/ - server_url: https://web-server.com - space_slug: - space_name: - service_user_username: - service_user_password: - options: - delete: true - task: - # server_url: https://.kinops.io/app/components/task OR https://.com/kinetic-task - server_url: https://web-server.com - service_user_username: - service_user_password: - http_options: - log_level: info - log_output: stderr -=end - -#Export_command ocra import.rb --no-dep-run --add-all-core --gem-files C:\Ruby32-x64\bin\ruby_builtin_dlls --gem-scripts C:\Ruby32-x64\lib\ruby\gems\3.2.0\gems\kinetic_sdk-5.0.21 --gemfile ../Gemfile - -require 'logger' -require 'json' -require 'rexml/document' -require 'optparse' -require 'kinetic_sdk' -include REXML -require 'find' - -template_name = "platform-template" - -logger = Logger.new(STDERR) -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| - date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") - "[#{date_format}] #{severity}: #{msg}\n" -end - -######################################### - -# Determine the Present Working Directory -pwd = Dir.pwd - -# ARGV << '-h' if ARGV.empty? - -# # The options specified on the command line will be collected in *options*. -# options = {} -# OptionParser.new do |opts| -# opts.banner = "Usage: example.rb [options]" - -# opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| -# options["CONFIG_FILE"] = config -# end - -# # No argument, shows at tail. This will print an options summary. -# # Try it and see! -# opts.on_tail("-h", "--help", "Show this message") do -# puts opts -# exit -# end -# end.parse! 
- -# #Now raise an exception if we have not found a CONFIG_FILE option -# raise OptionParser::MissingArgument if options["CONFIG_FILE"].nil? - - -# determine the directory paths -platform_template_path = pwd -config_folder_path = File.join(platform_template_path,'config') - -if !File.directory?(config_folder_path) - logger.info "Config folder not found at #{config_folder_path}" - puts "Cannot find config folder!" - puts "Exiting..." - gets - exit -end - -# #Determine Config file to use -config_exts = ['.yaml','.yml'] -configArray = [] -logger.info "Checking #{config_folder_path} for config files" -Find.find("#{config_folder_path}/") do |file| - configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) -end - -puts "Select your config file" -configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" -end -print "Selection: " -sel = gets.chomp.to_i -configFile = configArray[sel-1] - - -# ------------------------------------------------------------------------------ -# methods -# ------------------------------------------------------------------------------ - - - -# ------------------------------------------------------------------------------ -# constants -# ------------------------------------------------------------------------------ - - - -# ------------------------------------------------------------------------------ -# setup -# ------------------------------------------------------------------------------ - -logger.info "Installing gems for the \"#{template_name}\" template." 
-Dir.chdir(platform_template_path) { system("bundle", "install") } - - - -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ - - - -vars = {} -# Read the config file specified in the command line into the variable "vars" -if File.file?(file = "#{config_folder_path}/#{configFile}") - vars.merge!( YAML.load(File.read(file)) ) -elsif - raise "Config file not found: #{file}" -end - -#Setting core paths -core_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "core") -task_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "task") - -# Set http_options based on values provided in the config file. -http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| - result[k.to_sym] = v -end - -# Set option values to default values if not included -vars["options"] = !vars["options"].nil? ? vars["options"] : {} -vars["options"]["delete"] = !vars["options"]["delete"].nil? ? vars["options"]["delete"] : false - -logger.info "Importing using the config: #{JSON.pretty_generate(vars)}" - - - -space_sdk = KineticSdk::Core.new({ - space_server_url: vars["core"]["server_url"], - space_slug: vars["core"]["space_slug"], - username: vars["core"]["service_user_username"], - password: vars["core"]["service_user_password"], - options: http_options.merge({ export_directory: "#{core_path}" }) -}) - -puts "Are you sure you want to perform an import of data to #{vars["core"]["server_url"]}? 
[Y/N]" -STDOUT.flush -case (gets.downcase.chomp) -when 'y' - puts "Continuing Import" - STDOUT.flush -else - abort "Exiting Import" -end - -################################################################### -# ------------------------------------------------------------------------------ -# Update Space Attributes -# ------------------------------------------------------------------------------ - -sourceSpaceAttributeArray = [] -destinationSpaceAttributeArray = (space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") - spaceAttributeDefinitions = JSON.parse(File.read(file)) - - spaceAttributeDefinitions.each { |attribute| - if destinationSpaceAttributeArray.include?(attribute['name']) - space_sdk.update_space_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceSpaceAttributeArray.push(attribute['name']) - } -end - -destinationSpaceAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) - space_sdk.delete_space_attribute_definition(attribute) - end -} - -# ------------------------------------------------------------------------------ -# Update User Attributes -# ------------------------------------------------------------------------------ -sourceUserAttributeArray = [] -destinationUserAttributeArray = (space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") - userAttributeDefinitions = JSON.parse(File.read(file)) - userAttributeDefinitions.each { |attribute| - if destinationUserAttributeArray.include?(attribute['name']) - 
space_sdk.update_user_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserAttributeArray.push(attribute['name']) - } -end - -destinationUserAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) - space_sdk.delete_user_attribute_definition(attribute) - end -} -# ------------------------------------------------------------------------------ -# Update User Profile Attributes -# ------------------------------------------------------------------------------ - -sourceUserProfileAttributeArray = [] -destinationUserProfileAttributeArray = (space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/userProfileAttributeDefinitions.json") - userProfileAttributeDefinitions = JSON.parse(File.read(file)) - - userProfileAttributeDefinitions.each { |attribute| - if destinationUserProfileAttributeArray.include?(attribute['name']) - space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserProfileAttributeArray.push(attribute['name']) - } -end - -destinationUserProfileAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) - space_sdk.delete_user_profile_attribute_definition(attribute) - end -} - - -# ------------------------------------------------------------------------------ -# Update Team Attributes -# ------------------------------------------------------------------------------ - -sourceTeamAttributeArray = [] -destinationTeamAttributeArray = 
(space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/teamAttributeDefinitions.json") - teamAttributeDefinitions = JSON.parse(File.read(file)) - teamAttributeDefinitions.each { |attribute| - if destinationTeamAttributeArray.include?(attribute['name']) - space_sdk.update_team_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_team_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceTeamAttributeArray.push(attribute['name']) - } -end - -destinationTeamAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) - space_sdk.delete_team_attribute_definition(attribute) - end -} - - -# ------------------------------------------------------------------------------ -# Update Datastore Attributes -# ------------------------------------------------------------------------------ - -sourceDatastoreAttributeArray = [] -destinationDatastoreAttributeArray =(space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") - datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) - datastoreFormAttributeDefinitions.each { |attribute| - if destinationDatastoreAttributeArray.include?(attribute['name']) - space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_datastore_form_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceDatastoreAttributeArray.push(attribute['name']) - } -end - -destinationDatastoreAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) - #Delete form is disabled - 
#space_sdk.delete_datastore_form_attribute_definition(attribute) - end -} - - -# ------------------------------------------------------------------------------ -# Update Security Policy -# ------------------------------------------------------------------------------ - -sourceSecurityPolicyArray = [] -destinationSecurityPolicyArray = (space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") - securityPolicyDefinitions = JSON.parse(File.read(file)) - securityPolicyDefinitions.each { |attribute| - if destinationSecurityPolicyArray.include?(attribute['name']) - space_sdk.update_space_security_policy_definition(attribute['name'], attribute) - else - space_sdk.add_space_security_policy_definition(attribute) - end - sourceSecurityPolicyArray.push(attribute['name']) - } -end - -destinationSecurityPolicyArray.each { | attribute | - if vars["options"]["delete"] && !sourceSecurityPolicyArray.include?(attribute) - space_sdk.delete_space_security_policy_definition(attribute) - end -} - - -# ------------------------------------------------------------------------------ -# import bridge models -# *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. 
-# ------------------------------------------------------------------------------ - -destinationModels = space_sdk.find_bridge_models() -destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} - -Dir["#{core_path}/space/models/*.json"].each{ |model| - body = JSON.parse(File.read(model)) - if destinationModels_Array.include?(body['name']) - space_sdk.update_bridge_model(body['name'], body) - elsif - space_sdk.add_bridge_model(body) - end -} - -# ------------------------------------------------------------------------------ -# delete bridge models -# Delete any Bridges from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -SourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } - -destinationModels_Array.each do |model| - if vars["options"]["delete"] && !SourceModelsArray.include?(model) - space_sdk.delete_bridge_model(model) - end -end - -# ------------------------------------------------------------------------------ -# Import Space Web APIs -# ------------------------------------------------------------------------------ - -sourceSpaceWebApisArray = [] -destinationSpaceWebApisArray = (space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} - - -Dir["#{core_path}/space/webApis/*"].each{ |file| - body = JSON.parse(File.read(file)) - if destinationSpaceWebApisArray.include?(body['slug']) - space_sdk.update_space_webapi(body['slug'], body) - else - space_sdk.add_space_webapi(body) - end - sourceSpaceWebApisArray.push(body['slug']) -} - -# ------------------------------------------------------------------------------ -# Delete Space Web APIs -# Delete any Web APIs from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -destinationSpaceWebApisArray.each { | 
webApi | - if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) - space_sdk.delete_space_webapi(webApi) - end -} - -# ------------------------------------------------------------------------------ -# import datastore forms -# ------------------------------------------------------------------------------ -destinationDatastoreForms = [] #From destination server -sourceDatastoreForms = [] #From import data - -logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" - - destinationDatastoreForms = (space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} - Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| - body = JSON.parse(File.read(datastore)) - sourceDatastoreForms.push(body['slug']) - if destinationDatastoreForms.include?(body['slug']) - space_sdk.update_datastore_form(body['slug'], body) - else - space_sdk.add_datastore_form(body) - end - } - -# ------------------------------------------------------------------------------ -# delete datastore forms -# Delete any form from the destination which are missing from the import data -# ------------------------------------------------------------------------------ - - -destinationDatastoreForms.each { |datastore_slug| - if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) - space_sdk.delete_datastore_form(datastore_slug) - end -} - -# ------------------------------------------------------------------------------ -# Import Datastore Data -# ------------------------------------------------------------------------------ -Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - (space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| - space_sdk.delete_datastore_submission(submission['id']) - } - File.readlines(filename).each { 
|line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } - end - } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] - } - space_sdk.add_datastore_submission(form_slug, body).content - } -} - -# ------------------------------------------------------------------------------ -# import space teams -# ------------------------------------------------------------------------------ -if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 - SourceTeamArray = [] - destinationTeamsArray = (space_sdk.find_teams().content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name']} } - teams.each{ |team| - body = JSON.parse(File.read(team)) - if !destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug'] }.nil? - space_sdk.update_team(body['slug'], body) - else - space_sdk.add_team(body) - end - #Add Attributes to the Team - (body['attributes'] || []).each{ | attribute | - space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) - } - SourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) - } - - # ------------------------------------------------------------------------------ - # delete space teams - # TODO: A method doesn't exist for deleting the team - # ------------------------------------------------------------------------------ - - destinationTeamsArray.each { |team| - #if !SourceTeamArray.include?(team) - if SourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? - #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. 
- #space_sdk.delete_team(team['slug']) - end - } -end - -# ------------------------------------------------------------------------------ -# import kapp data -# ------------------------------------------------------------------------------ - -kapps_array = [] -Dir["#{core_path}/space/kapps/*"].each { |file| - kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? File::SEPARATOR : x}.last.gsub('.json','') - next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration - kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur - kapp = {} - kapp['slug'] = kapp_slug # set kapp_slug - - if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file - kapp = JSON.parse( File.read(file) ) - kappExists = space_sdk.find_kapp(kapp['slug']).code.to_i == 200 - if kappExists - space_sdk.update_kapp(kapp['slug'], kapp) - else - space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) - end - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Attribute Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") - sourceKappAttributeArray = [] - destinationKappAttributeArray = (space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappAttributeDefinitions = JSON.parse(File.read(file)) - (kappAttributeDefinitions || []).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], 
attribute['allowsMultiple']) - end - sourceKappAttributeArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Attribute Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) - space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) - end - } - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Category Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") - sourceKappCategoryArray = [] - destinationKappAttributeArray = (space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappCategoryDefinitions = JSON.parse(File.read(file)) - (kappCategoryDefinitions || []).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceKappCategoryArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Category Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each { | attribute | - if !sourceKappCategoryArray.include?(attribute) - space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) - end - } - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Form 
Attribute Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") - sourceFormAttributeArray = [] - destinationFormAttributeArray = (space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} - formAttributeDefinitions = JSON.parse(File.read(file)) - (formAttributeDefinitions || []).each { |attribute| - if destinationFormAttributeArray.include?(attribute['name']) - space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceFormAttributeArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Attribute Definitions - # ------------------------------------------------------------------------------ - destinationFormAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) - space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) - end - } - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Form Type Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") - sourceFormTypesArray = [] - destinationFormTypesArray = (space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} - formTypes = JSON.parse(File.read(file)) - (formTypes || []).each { |body| - if destinationFormTypesArray.include?(body['name']) - space_sdk.update_formtype(kapp['slug'], body['name'], body) - else - space_sdk.add_formtype(kapp['slug'], body) - end - 
sourceFormTypesArray.push(body['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Type Definitions - # ------------------------------------------------------------------------------ - destinationFormTypesArray.each { | name | - if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) - space_sdk.delete_formtype(kapp['slug'],name) - end - } - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Security Policy Definitions - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") - sourceSecurtyPolicyArray = [] - destinationSecurtyPolicyArray = (space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} - securityPolicyDefinitions = JSON.parse(File.read(file)) - (securityPolicyDefinitions || []).each { |attribute| - if destinationSecurtyPolicyArray.include?(attribute['name']) - space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_security_policy_definition(kapp['slug'], attribute) - end - sourceSecurtyPolicyArray.push(attribute['name']) - } - - destinationSecurtyPolicyArray.each { | attribute | - if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) - space_sdk.delete_security_policy_definition(kapp['slug'],attribute) - end - } - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Categories - # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") - sourceCategoryArray = [] - destinationCategoryArray = (space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| 
definition['slug']} - categories = JSON.parse(File.read(file)) - (categories || []).each { |attribute| - if destinationCategoryArray.include?(attribute['slug']) - space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) - else - space_sdk.add_category_on_kapp(kapp['slug'], attribute) - end - sourceCategoryArray.push(attribute['slug']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Categories - # ------------------------------------------------------------------------------ - - destinationCategoryArray.each { | attribute | - if !sourceCategoryArray.include?(attribute) - space_sdk.delete_category_on_kapp(kapp['slug'],attribute) - end - } - end - - # ------------------------------------------------------------------------------ - # import space webhooks - # ------------------------------------------------------------------------------ - sourceSpaceWebhooksArray = [] - destinationSpaceWebhooksArray = (space_sdk.find_webhooks_on_space().content['webhooks'] || {}).map{ |webhook| webhook['name']} - - Dir["#{core_path}/space/webhooks/*.json"].each{ |file| - webhook = JSON.parse(File.read(file)) - if destinationSpaceWebhooksArray.include?(webhook['name']) - space_sdk.update_webhook_on_space(webhook['name'], webhook) - elsif - space_sdk.add_webhook_on_space(webhook) - end - sourceSpaceWebhooksArray.push(webhook['name']) - } - - # ------------------------------------------------------------------------------ - # delete space webhooks - # TODO: A method doesn't exist for deleting the webhook - # ------------------------------------------------------------------------------ - - destinationSpaceWebhooksArray.each do |webhook| - if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) - space_sdk.delete_webhook_on_space(webhook) - end - end - - # ------------------------------------------------------------------------------ - # Migrate Kapp Webhooks - # 
------------------------------------------------------------------------------ - sourceWebhookArray = [] - webhooks_on_kapp = space_sdk.find_webhooks_on_kapp(kapp['slug']) - - if webhooks_on_kapp.code=="200" - destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| - webhookDef = JSON.parse(File.read(webhookFile)) - if destinationWebhookArray.include?(webhookDef['name']) - space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) - else - space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) - end - sourceWebhookArray.push(webhookDef['name']) - } - - # ------------------------------------------------------------------------------ - # Delete Kapp Webhooks - # ------------------------------------------------------------------------------ - destinationWebhookArray.each { | attribute | - if vars["options"]["delete"] && !sourceWebhookArray.include?(attribute) - space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) - end - } - end - - - # ------------------------------------------------------------------------------ - # Add Kapp Forms - # ------------------------------------------------------------------------------ - - if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 - sourceForms = [] #From import data - destinationForms = (space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} - forms.each { |form| - properties = File.read(form) - form = JSON.parse(properties) - sourceForms.push(form['slug']) - if destinationForms.include?(form['slug']) - space_sdk.update_form(kapp['slug'] ,form['slug'], form) - else - space_sdk.add_form(kapp['slug'], form) - end - } - # ------------------------------------------------------------------------------ - # delete forms - # ------------------------------------------------------------------------------ - 
destinationForms.each { |slug| - if vars["options"]["delete"] && !sourceForms.include?(slug) - #Delete form is disabled - #space_sdk.delete_form(kapp['slug'], slug) - end - } - end - - # ------------------------------------------------------------------------------ - # Import Kapp Form Data - # ------------------------------------------------------------------------------ - Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - - # This code could delete all submissions form the form before importing new data - # It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. - #(space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| - # space_sdk.delete_submission(submission['id']) - #} - - File.readlines(filename).each { |line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } - end - } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] - } - space_sdk.add_submission(kapp['slug'], form_slug, body).content - } - } - # ------------------------------------------------------------------------------ - # Add Kapp Web APIs - # ------------------------------------------------------------------------------ - sourceWebApisArray = [] - destinationWebApisArray = (space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| - body = JSON.parse(File.read(webApi)) - if destinationWebApisArray.include?(body['slug']) - space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) - else - space_sdk.add_kapp_webapi(kapp['slug'], body) - end - sourceWebApisArray.push(body['slug']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Web APIs - # ------------------------------------------------------------------------------ - destinationWebApisArray.each { | webApi | - if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) - space_sdk.delete_kapp_webapi(kapp['slug'], webApi) - end - } -} - -# ------------------------------------------------------------------------------ -# task -# ------------------------------------------------------------------------------ - -task_sdk = KineticSdk::Task.new({ - app_server_url: "#{vars["task"]["server_url"]}", - username: vars["task"]["service_user_username"], - password: vars["task"]["service_user_password"], - options: http_options.merge({ export_directory: "#{task_path}" }) -}) - -# ------------------------------------------------------------------------------ -# task import -# 
------------------------------------------------------------------------------ - -logger.info "Importing the task components for the \"#{template_name}\" template." -logger.info " importing with api: #{task_sdk.api_url}" - -# ------------------------------------------------------------------------------ -# task handlers -# ------------------------------------------------------------------------------ - -# import handlers forcing overwrite -task_sdk.import_handlers(true) - -# ------------------------------------------------------------------------------ -# Import Task Trees and Routines -# ------------------------------------------------------------------------------ - -# import routines and force overwrite -task_sdk.import_routines(true) -# import trees and force overwrite -task_sdk.import_trees(true) - - - -# ------------------------------------------------------------------------------ -# import task categories -# ------------------------------------------------------------------------------ - -sourceCategories = [] #From import data -destinationCategories = (task_sdk.find_categories().content['categories'] || {}).map{ |category| category['name']} - -Dir["#{task_path}/categories/*.json"].each { |file| - category = JSON.parse(File.read(file)) - sourceCategories.push(category['name']) - if destinationCategories.include?(category['name']) - task_sdk.update_category(category['name'], category) - else - task_sdk.add_category(category) - end -} - -# ------------------------------------------------------------------------------ -# delete task categories -# ------------------------------------------------------------------------------ - -destinationCategories.each { |category| - if vars["options"]["delete"] && !sourceCategories.include?(category) - task_sdk.delete_category(category) - end -} - -# ------------------------------------------------------------------------------ -# import task policy rules -# 
------------------------------------------------------------------------------ - -destinationPolicyRuleArray = task_sdk.find_policy_rules().content['policyRules'] -sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| - rule = JSON.parse(File.read(file)) - {"name" => rule['name'], "type" => rule['type']} - } - -Dir["#{task_path}/policyRules/*.json"].each { |file| - rule = JSON.parse(File.read(file)) - if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? - task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) - else - task_sdk.add_policy_rule(rule) - end -} - -# ------------------------------------------------------------------------------ -# delete task policy rules -# ------------------------------------------------------------------------------ -destinationPolicyRuleArray.each { |rule| - if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? - task_sdk.delete_policy_rule(rule) - end -} - -# ------------------------------------------------------------------------------ -# Delete Trees and Routines not in the Source Data -# ------------------------------------------------------------------------------ - -# identify Trees and Routines on destination -destinationtrees = [] -trees = task_sdk.find_trees().content -(trees['trees'] || []).each { |tree| - destinationtrees.push( tree['title'] ) -} - -# identify Routines in source data -sourceTrees = [] -Dir["#{task_path}/routines/*.xml"].each {|routine| - doc = Document.new(File.new(routine)) - root = doc.root - sourceTrees.push("#{root.elements["taskTree/name"].text}") -} -# identify trees in source data -Dir["#{task_path}/sources/*"].each {|source| - if File.directory? 
source - Dir["#{source}/trees/*.xml"].each { |tree| - doc = Document.new(File.new(tree)) - root = doc.root - tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" - sourceTrees.push(tree) - } - end -} - -# Delete the extra tress and routines on the source -destinationtrees.each { | tree | - if vars["options"]["delete"] && !sourceTrees.include?(tree) - treeDef = tree.split(' :: ') - task_sdk.delete_tree( tree ) - end -} - -# ------------------------------------------------------------------------------ -# complete -# ------------------------------------------------------------------------------ - -logger.info "Finished importing the \"#{template_name}\" forms." diff --git a/config/servername_environment_import_config.yml b/config/servername_environment_import_config.yml index 056d78d..0db6f6f 100644 --- a/config/servername_environment_import_config.yml +++ b/config/servername_environment_import_config.yml @@ -4,6 +4,7 @@ core: server_url: https://web-server.com space_slug: space_name: + old_space_slug: service_user_username: service_user_password: options: diff --git a/export.rb b/export.rb index 6686076..8e4df6d 100644 --- a/export.rb +++ b/export.rb @@ -222,16 +222,28 @@ def SecurePWD(file,vars,pwdAttribute) end enc = Base64.strict_encode64(password) vars[pwdAttribute]["service_user_password"] = enc.to_s - File.open(file, 'w') {|f| f.write vars.to_yaml} + begin + file = File.open(file, 'w') + file.write vars.to_yaml + #{ |f| f.write vars.to_yaml } + rescue + logger.error("We crashed!") + ensure + file.close + end return password end +##TODO - This didn't write correctly - It set first as a string in quotes, second as encoded +##TODO - This didn't read a plaintext and encode for me #Setup secure pwd function - Checks for nil and prompts for pwd, then b64 encodes and writes to yml if !vars["core"]["service_user_password"].is_a?(String) || 
Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] - vars["core"]["service_user_password"] = SecurePWD(file,vars,"core") + SecurePWD(file,vars,"core") + logger.info("Core pwd encoded") end if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] - vars["task"]["service_user_password"] = SecurePWD(file,vars,"task") + SecurePWD(file,vars,"task") + logger.info("Task pwd encoded") end #Write PT pwds into local variable @@ -265,11 +277,6 @@ def SecurePWD(file,vars,pwdAttribute) core_path = File.join(platform_template_path, "exports", folderName, "core") task_path = File.join(platform_template_path, "exports", folderName, "task") -puts "Core #{core_path}" -puts "Task #{task_path}" -gets -exit - # Output the yml file config logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" diff --git a/import.rb b/import.rb index 7f595de..9c6972e 100644 --- a/import.rb +++ b/import.rb @@ -131,6 +131,14 @@ logger.info "Importing using the config: #{JSON.pretty_generate(vars)}" +#Decode password to utilize +def DecodePWD(pwdAttribute) + return Base64.decode64(pwdAttribute) +end + +vars["core"]["service_user_password"] = DecodePWD(vars["core"]["service_user_password"]) +vars["task"]["service_user_password"] = DecodePWD(vars["task"]["service_user_password"]) + space_sdk = KineticSdk::Core.new({ space_server_url: vars["core"]["server_url"], From 33cfa467af52baca84827d85cc7f0198d784961c Mon Sep 17 00:00:00 2001 From: Brian Peterson Date: Fri, 15 Dec 2023 14:39:31 -0600 Subject: [PATCH 10/19] Updated how the attachement URL is created --- export.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/export.rb b/export.rb index 8e4df6d..50b075b 100644 --- a/export.rb +++ b/export.rb @@ -437,7 +437,8 @@ def SecurePWD(file,vars,pwdAttribute) # dir and file name to 
write attachment download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" # url to retrieve the attachment - url = URI.escape("#{attachment_base_url}/submissions/#{submission_id}/files/#{field}/#{index}/#{attachment['name']}") + logger.info ("#{attachment_base_url}/submissions/#{submission_id}/files/#{field}/#{index}/#{attachment['name']}") + url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" # retrieve and write attachment space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) # add the "path" key to indicate the attachment's location From 1c1040a87d3ce1910232d7575abfb7651fcc1d98 Mon Sep 17 00:00:00 2001 From: Brian Peterson Date: Fri, 15 Dec 2023 14:41:49 -0600 Subject: [PATCH 11/19] Removed Logging Statement --- export.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/export.rb b/export.rb index 50b075b..822f6c8 100644 --- a/export.rb +++ b/export.rb @@ -437,7 +437,6 @@ def SecurePWD(file,vars,pwdAttribute) # dir and file name to write attachment download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" # url to retrieve the attachment - logger.info ("#{attachment_base_url}/submissions/#{submission_id}/files/#{field}/#{index}/#{attachment['name']}") url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" # retrieve and write attachment space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) From 210b3b93f8b02911cdc139a8a353d4457820d814 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Fri, 5 Jan 2024 11:07:41 -0600 Subject: [PATCH 12/19] Overhaul to encoding/decoding to correct bug, added to import.rb --- export.rb | 101 +++++++++++++++++++------- import.rb | 211 ++++++++++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 259 insertions(+), 53 deletions(-) diff --git 
a/export.rb b/export.rb index 822f6c8..8195780 100644 --- a/export.rb +++ b/export.rb @@ -53,6 +53,7 @@ require 'base64' #For pwd encoding template_name = "platform-template" +$pwdFields = ["core","task"] logger = Logger.new(STDERR) logger.level = Logger::INFO @@ -62,6 +63,7 @@ end + # Determine the Present Working Directory pwd = File.expand_path(File.dirname(__FILE__)) @@ -69,8 +71,6 @@ # The options specified on the command line will be collected in *options*. options = {} OptionParser.new do |opts| - break if opts === '' #If no arguments provided, then break out and move to selection - #TODO - See if this can be more elegant or if this is the best we have opts.banner = "Usage: example.rb [options]" opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| options["CONFIG_FILE"] = config @@ -78,11 +78,10 @@ # No argument, shows at tail. This will print an options summary. # Try it and see! - opts.on("-h", "--help", "Show this message") do + opts.on_tail("-h", "--help", "Show this message") do puts opts exit end - end.parse! @@ -102,13 +101,15 @@ def config_selection(config_folder_path, logger) config_exts = ['.yaml','.yml'] configArray = [] logger.info "Checking #{config_folder_path} for config files" + #Check config folder for yaml/yml files containing the word 'export' begin Find.find("#{config_folder_path}/") do |file| configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('export')) end - rescue + rescue error #No config files found in config folder - logger.info "Error finding default config file path!" + logger.error "Error finding default config file path!" + logger.error "Error reported: #{error}" puts "Cannot find config files in default path! (#{pwd})" puts "Exiting script..." gets @@ -200,6 +201,7 @@ def remove_discussion_id_attribute(model) end rescue => error logger.info error + logger.info "Exiting..." 
exit end @@ -209,46 +211,97 @@ def remove_discussion_id_attribute(model) rescue => error logger.info "Error loading YAML configuration" logger.info error + logger.info "Exiting..." + gets exit end logger.info "Configuration file passed validation." + +#Check if nil/unencoded and update accordingly def SecurePWD(file,vars,pwdAttribute) #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert if vars[pwdAttribute]["service_user_password"].nil? password = IO::console.getpass "Enter Password(#{pwdAttribute}): " else - password =vars[pwdAttribute]["service_user_password"] + password = vars[pwdAttribute]["service_user_password"] end enc = Base64.strict_encode64(password) vars[pwdAttribute]["service_user_password"] = enc.to_s begin - file = File.open(file, 'w') - file.write vars.to_yaml + fileObj = File.open(file, 'w') + puts "Updated pwd in #{pwdAttribute} to #{enc}" + fileObj.write vars.to_yaml #{ |f| f.write vars.to_yaml } - rescue - logger.error("We crashed!") + rescue ArgumentError + logger.error("There was an error while updating variables file:") + logger.error(ArgumentError) ensure - file.close + fileObj.close end - return password end -##TODO - This didn't write correctly - It set first as a string in quotes, second as encoded -##TODO - This didn't read a plaintext and encode for me -#Setup secure pwd function - Checks for nil and prompts for pwd, then b64 encodes and writes to yml -if !vars["core"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] - SecurePWD(file,vars,"core") - logger.info("Core pwd encoded") +#Decode password to utilize +def DecodePWD(file, vars, pwdLoc) + pwdAttribute = vars[pwdLoc]["service_user_password"] + # if !pwdAttribute.is_a?(String) || Base64.strict_encode64(Base64.decode64(pwdAttribute)) != pwdAttribute || pwdAttribute === "" + # puts "Adjusting password for #{pwdLoc}" + # 
SecurePWD(file, vars, pwdLoc) + # pwdAttribute = vars[pwdLoc]["service_user_password"] + # end + return Base64.decode64(pwdAttribute) end -if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] - SecurePWD(file,vars,"task") - logger.info("Task pwd encoded") + +#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions +def ValidatePWD(file, vars) + $pwdFields.each do |field| + t = vars[field]["service_user_password"] + #See if not a string, not encoded, or default + if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" + puts "Updating password #{t}" + SecurePWD(file, vars, field) + end + end end +#OLD +#Setup secure pwd function - If entry is not a string(nil) or unencoded, then run pwd encoder/prompt +# if !vars["core"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] +# SecurePWD(file,vars,"core") +# logger.info("Core pwd encoded") +# end +# if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] +# SecurePWD(file,vars,"task") +# logger.info("Task pwd encoded") +# end #Write PT pwds into local variable -vars["core"]["service_user_password"] = Base64.decode64(vars["core"]["service_user_password"]) -vars["task"]["service_user_password"] = Base64.decode64(vars["task"]["service_user_password"]) +# vars["core"]["service_user_password"] = Base64.decode64(vars["core"]["service_user_password"]) +# vars["task"]["service_user_password"] = Base64.decode64(vars["task"]["service_user_password"]) + +#NEW +#Will confirm there is a valid, encoded password and decode. 
Otherwise it will prompt/encode pwd and return decoded variant + +#Temporary workaround - run decode and trash first time to ensure improper data written - Otherwise first action will store decoded var and 2nd will write it to the config +ValidatePWD(file, vars) +#TODO - Review whether or not vars is handed in or just the pwd value +vars["core"]["service_user_password"] = DecodePWD(file, vars, "core") +vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") +puts "Pause here..." +gets +exit + +if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? + puts "Core password is blank! Password required. Exiting..." + gets + exit +end +if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? + puts "Task password is blank! Password required. Exiting..." + gets + exit +end + + # Set http_options based on values provided in the config file. http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| diff --git a/import.rb b/import.rb index 9c6972e..205af6b 100644 --- a/import.rb +++ b/import.rb @@ -36,12 +36,15 @@ log_output: stderr =end -require 'logger' +require 'logger' #For System Logging require 'json' require 'rexml/document' -require 'optparse' +require 'optparse' #For argument parsing require 'kinetic_sdk' +require 'Find' #For config list building include REXML +require 'io/console' #For password request +require 'base64' #For pwd encoding template_name = "platform-template" @@ -57,13 +60,12 @@ # Determine the Present Working Directory pwd = File.expand_path(File.dirname(__FILE__)) -ARGV << '-h' if ARGV.empty? +# ARGV << '-h' if ARGV.empty? # The options specified on the command line will be collected in *options*. options = {} OptionParser.new do |opts| opts.banner = "Usage: example.rb [options]" - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| options["CONFIG_FILE"] = config end @@ -76,13 +78,79 @@ end end.parse! 
-#Now raise an exception if we have not found a CONFIG_FILE option -raise OptionParser::MissingArgument if options["CONFIG_FILE"].nil? +#Configuration Selection +def config_selection(config_folder_path, logger) + + #Ensure config folder exists + if !File.directory?(config_folder_path) + logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." + gets + exit + end + + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + logger.info "Checking #{config_folder_path} for config files" + #Check config folder for yaml/yml files containing the word 'import' + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) + end + rescue error + #No config files found in config folder + logger.error "Error finding default config file path!" + logger.error "Error reported: #{error}" + puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." + gets + exit + end + logger.info "Found config files" + + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + logger.info "Option #{sel} - #{configFile}" + break + rescue + logger.info "Error selecting config file! Exiting..." + puts "Error selecting config file!" + puts "Exiting..." 
+ gets + exit + end + end while true + return configFile +end + + +#End method # determine the directory paths platform_template_path = File.dirname(File.expand_path(__FILE__)) +config_folder_path = File.join(platform_template_path,'config') +if options["CONFIG_FILE"].nil? + options["CONFIG_FILE"] = config_selection(config_folder_path, logger) +end # ------------------------------------------------------------------------------ # methods @@ -95,7 +163,6 @@ # ------------------------------------------------------------------------------ - # ------------------------------------------------------------------------------ # setup # ------------------------------------------------------------------------------ @@ -103,42 +170,128 @@ logger.info "Installing gems for the \"#{template_name}\" template." Dir.chdir(platform_template_path) { system("bundle", "install") } - - -# ------------------------------------------------------------------------------ -# core -# ------------------------------------------------------------------------------ vars = {} +file = "#{platform_template_path}/#{options['CONFIG_FILE']}" + +# Check if configuration file exists +logger.info "Validating configuration file." +begin + if File.exist?(file) != true + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end + end +rescue => error + logger.info error + logger.info "Exiting..." + exit +end + # Read the config file specified in the command line into the variable "vars" -if File.file?(file = "#{platform_template_path}/#{options['CONFIG_FILE']}") - vars.merge!( YAML.load(File.read("#{platform_template_path}/#{options['CONFIG_FILE']}")) ) -elsif - raise "Config file not found: #{file}" +begin + vars.merge!( YAML.load(File.read(file)) ) +rescue => error + logger.info "Error loading YAML configuration" + logger.info error + logger.info "Exiting..." 
+ gets + exit end +logger.info "Configuration file passed validation." + +#Check if nil/unencoded and update accordingly +def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if vars[pwdAttribute]["service_user_password"].nil? + password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password = vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + begin + fileObj = File.open(file, 'w') + puts "Updated pwd in #{pwdAttribute} to #{enc}" + fileObj.write vars.to_yaml + #{ |f| f.write vars.to_yaml } + rescue ArgumentError + logger.error("There was an error while updating variables file:") + logger.error(ArgumentError) + ensure + fileObj.close + end +end + +#Decode password to utilize +def DecodePWD(file, vars, pwdLoc) + pwdAttribute = vars[pwdLoc]["service_user_password"] + # if !pwdAttribute.is_a?(String) || Base64.strict_encode64(Base64.decode64(pwdAttribute)) != pwdAttribute || pwdAttribute === "" + # puts "Adjusting password for #{pwdLoc}" + # SecurePWD(file, vars, pwdLoc) + # pwdAttribute = vars[pwdLoc]["service_user_password"] + # end + return Base64.decode64(pwdAttribute) +end + +#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions +def ValidatePWD(file, vars) + $pwdFields.each do |field| + t = vars[field]["service_user_password"] + #See if not a string, not encoded, or default + if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" + puts "Updating password #{t}" + SecurePWD(file, vars, field) + end + end +end + +ValidatePWD(file, vars) +#Will confirm there is a valid, encoded password and decode. 
Otherwise it will prompt/encode pwd and return decoded variant +vars["core"]["service_user_password"] = DecodePWD(file, vars,"core") +vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") + + +if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? + puts "Core password is blank! Password required. Exiting..." + gets + exit +end +if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? + puts "Task password is blank! Password required. Exiting..." + gets + exit +end + -#Setting core paths -core_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "core") -task_path = File.join(platform_template_path, "exports", vars['core']['old_space_slug'], "task") # Set http_options based on values provided in the config file. http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| result[k.to_sym] = v end -# Set option values to default values if not included -vars["options"] = !vars["options"].nil? ? vars["options"] : {} -vars["options"]["delete"] = !vars["options"]["delete"].nil? ? vars["options"]["delete"] : false - -logger.info "Importing using the config: #{JSON.pretty_generate(vars)}" +#Config exports folder exists, if not then create +if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) +end -#Decode password to utilize -def DecodePWD(pwdAttribute) - return Base64.decode64(pwdAttribute) +#Setting core paths utilzing variables +if !vars['core']['space_slug'].nil? + folderName = vars['core']['space_slug'] +elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] +else + puts "No space slug or name provided! Please provide one in order to export..." 
+ gets + exit end +core_path = File.join(platform_template_path, "exports", folderName, "core") +task_path = File.join(platform_template_path, "exports", folderName, "task") -vars["core"]["service_user_password"] = DecodePWD(vars["core"]["service_user_password"]) -vars["task"]["service_user_password"] = DecodePWD(vars["task"]["service_user_password"]) +# Output the yml file config +logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" +logger.info "Setting up the SDK" space_sdk = KineticSdk::Core.new({ space_server_url: vars["core"]["server_url"], From 8da94b6b2b500f5a4b951d009811ee12b9ac91c3 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Fri, 12 Apr 2024 17:49:01 -0500 Subject: [PATCH 13/19] Adjusted logger to global, updated sdk gem --- Gemfile | 2 +- Gemfile.lock | 8 ++-- export.rb | 115 +++++++++++++++++++++------------------------------ import.rb | 8 ++-- 4 files changed, 54 insertions(+), 79 deletions(-) diff --git a/Gemfile b/Gemfile index 6734f78..74bdc1a 100644 --- a/Gemfile +++ b/Gemfile @@ -1,3 +1,3 @@ source 'https://rubygems.org' -gem 'kinetic_sdk', '5.0.22' +gem 'kinetic_sdk', '5.0.26' diff --git a/Gemfile.lock b/Gemfile.lock index 6482959..6d9930f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,13 @@ GEM remote: https://rubygems.org/ specs: - kinetic_sdk (5.0.22) + kinetic_sdk (5.0.23) mime-types (>= 3.3.1) multipart-post (= 2.0.0) slugify (= 1.0.7) - mime-types (3.5.1) + mime-types (3.5.2) mime-types-data (~> 3.2015) - mime-types-data (3.2023.1003) + mime-types-data (3.2024.0206) multipart-post (2.0.0) slugify (1.0.7) @@ -17,7 +17,7 @@ PLATFORMS x64-mingw32 DEPENDENCIES - kinetic_sdk (= 5.0.22) + kinetic_sdk (= 5.0.23) BUNDLED WITH 2.2.5 diff --git a/export.rb b/export.rb index 8195780..d0c39f9 100644 --- a/export.rb +++ b/export.rb @@ -1,3 +1,6 @@ +#TODO work +#Bluestone exported a workflow as "inactive" but it imported as "active" + # RUNNING THE SCRIPT: # ruby export.rb -c "<>" # ruby export.rb -c 
"config/foo-web-server.rb" @@ -55,9 +58,9 @@ template_name = "platform-template" $pwdFields = ["core","task"] -logger = Logger.new(STDERR) -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| +$logger = Logger.new(STDERR) +$logger.level = Logger::INFO +$logger.formatter = proc do |severity, datetime, progname, msg| date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") "[#{date_format}] #{severity}: #{msg}\n" end @@ -86,11 +89,11 @@ #Configuration Selection -def config_selection(config_folder_path, logger) +def config_selection(config_folder_path) #Ensure config folder exists if !File.directory?(config_folder_path) - logger.info "Config folder not found at #{config_folder_path}" + $logger.info "Config folder not found at #{config_folder_path}" puts "Cannot find config folder!" puts "Exiting..." gets @@ -100,7 +103,7 @@ def config_selection(config_folder_path, logger) # #Determine Config file to use config_exts = ['.yaml','.yml'] configArray = [] - logger.info "Checking #{config_folder_path} for config files" + $logger.info "Checking #{config_folder_path} for config files" #Check config folder for yaml/yml files containing the word 'export' begin Find.find("#{config_folder_path}/") do |file| @@ -108,21 +111,21 @@ def config_selection(config_folder_path, logger) end rescue error #No config files found in config folder - logger.error "Error finding default config file path!" - logger.error "Error reported: #{error}" + $logger.error "Error finding default config file path!" + $logger.error "Error reported: #{error}" puts "Cannot find config files in default path! (#{pwd})" puts "Exiting script..." 
gets exit end - logger.info "Found config files" + $logger.info "Found config files" #Print config file options with number indicators to select puts "Select your config file" configArray.each_with_index do |cFile, index| puts "#{index+1}) #{cFile}" end - logger.info "Select section" + $logger.info "Select section" begin print "Selection (0 to repeat options): " sel = gets.chomp.to_i @@ -134,10 +137,10 @@ def config_selection(config_folder_path, logger) next end configFile = configArray[sel-1] - logger.info "Option #{sel} - #{configFile}" + $logger.info "Option #{sel} - #{configFile}" break rescue - logger.info "Error selecting config file! Exiting..." + $logger.info "Error selecting config file! Exiting..." puts "Error selecting config file!" puts "Exiting..." gets @@ -155,7 +158,7 @@ def config_selection(config_folder_path, logger) config_folder_path = File.join(platform_template_path,'config') if options["CONFIG_FILE"].nil? - options["CONFIG_FILE"] = config_selection(config_folder_path, logger) + options["CONFIG_FILE"] = config_selection(config_folder_path) end # ------------------------------------------------------------------------------ @@ -184,14 +187,14 @@ def remove_discussion_id_attribute(model) # setup # ------------------------------------------------------------------------------ -logger.info "Installing gems for the \"#{template_name}\" template." +$logger.info "Installing gems for the \"#{template_name}\" template." Dir.chdir(platform_template_path) { system("bundle", "install") } vars = {} file = "#{platform_template_path}/#{options['CONFIG_FILE']}" # Check if configuration file exists -logger.info "Validating configuration file." +$logger.info "Validating configuration file." begin if File.exist?(file) != true file = "#{config_folder_path}/#{options['CONFIG_FILE']}" @@ -200,8 +203,8 @@ def remove_discussion_id_attribute(model) end end rescue => error - logger.info error - logger.info "Exiting..." + $logger.info error + $logger.info "Exiting..." 
exit end @@ -209,13 +212,13 @@ def remove_discussion_id_attribute(model) begin vars.merge!( YAML.load(File.read(file)) ) rescue => error - logger.info "Error loading YAML configuration" - logger.info error - logger.info "Exiting..." + $logger.info "Error loading YAML configuration" + $logger.info error + $logger.info "Exiting..." gets exit end -logger.info "Configuration file passed validation." +$logger.info "Configuration file passed validation." #Check if nil/unencoded and update accordingly @@ -234,21 +237,17 @@ def SecurePWD(file,vars,pwdAttribute) fileObj.write vars.to_yaml #{ |f| f.write vars.to_yaml } rescue ArgumentError - logger.error("There was an error while updating variables file:") - logger.error(ArgumentError) + $logger.error("There was an error while updating variables file:") + $logger.error(ArgumentError) ensure fileObj.close end + #TODO - If you cannot properly write an encoded pwd, exit end #Decode password to utilize def DecodePWD(file, vars, pwdLoc) pwdAttribute = vars[pwdLoc]["service_user_password"] - # if !pwdAttribute.is_a?(String) || Base64.strict_encode64(Base64.decode64(pwdAttribute)) != pwdAttribute || pwdAttribute === "" - # puts "Adjusting password for #{pwdLoc}" - # SecurePWD(file, vars, pwdLoc) - # pwdAttribute = vars[pwdLoc]["service_user_password"] - # end return Base64.decode64(pwdAttribute) end @@ -264,31 +263,9 @@ def ValidatePWD(file, vars) end end -#OLD -#Setup secure pwd function - If entry is not a string(nil) or unencoded, then run pwd encoder/prompt -# if !vars["core"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["core"]["service_user_password"])) != vars["core"]["service_user_password"] -# SecurePWD(file,vars,"core") -# logger.info("Core pwd encoded") -# end -# if !vars["task"]["service_user_password"].is_a?(String) || Base64.strict_encode64(Base64.decode64(vars["task"]["service_user_password"])) != vars["task"]["service_user_password"] -# SecurePWD(file,vars,"task") -# 
logger.info("Task pwd encoded") -# end -#Write PT pwds into local variable -# vars["core"]["service_user_password"] = Base64.decode64(vars["core"]["service_user_password"]) -# vars["task"]["service_user_password"] = Base64.decode64(vars["task"]["service_user_password"]) - -#NEW -#Will confirm there is a valid, encoded password and decode. Otherwise it will prompt/encode pwd and return decoded variant - -#Temporary workaround - run decode and trash first time to ensure improper data written - Otherwise first action will store decoded var and 2nd will write it to the config ValidatePWD(file, vars) -#TODO - Review whether or not vars is handed in or just the pwd value vars["core"]["service_user_password"] = DecodePWD(file, vars, "core") vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") -puts "Pause here..." -gets -exit if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? puts "Core password is blank! Password required. Exiting..." 
@@ -331,9 +308,9 @@ def ValidatePWD(file, vars) task_path = File.join(platform_template_path, "exports", folderName, "task") # Output the yml file config -logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" +$logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" -logger.info "Setting up the SDK" +$logger.info "Setting up the SDK" space_sdk = KineticSdk::Core.new({ space_server_url: vars["core"]["server_url"], @@ -356,7 +333,7 @@ def ValidatePWD(file, vars) # Validate Core Connection begin - logger.info "Validating connection to Core \"#{space_sdk.api_url}\"" + $logger.info "Validating connection to Core \"#{space_sdk.api_url}\"" response = space_sdk.me() if response.status == 0 raise response.message @@ -364,13 +341,13 @@ def ValidatePWD(file, vars) raise response.content['error'] end rescue => error - logger.info error + $logger.info error exit end # Validate Task Connection begin - logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" + $logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" response = task_sdk.environment() if response.status == 0 raise response.message @@ -378,26 +355,26 @@ def ValidatePWD(file, vars) raise response.content['error'] end rescue => error - logger.info error + $logger.info error exit end -logger.info "Validating connection to Cors and Task was Successful" +$logger.info "Validating connection to Cors and Task was Successful" # ------------------------------------------------------------------------------ # core # ------------------------------------------------------------------------------ -logger.info "Removing files and folders from the existing \"#{template_name}\" template." +$logger.info "Removing files and folders from the existing \"#{template_name}\" template." 
FileUtils.rm_rf Dir.glob("#{core_path}/*") -logger.info "Setting up the Core SDK" +$logger.info "Setting up the Core SDK" # fetch export from core service and write to export directory -logger.info "Exporting the core components for the \"#{template_name}\" template." -logger.info " exporting with api: #{space_sdk.api_url}" -logger.info " - exporting configuration data (Kapps,forms, etc)" +$logger.info "Exporting the core components for the \"#{template_name}\" template." +$logger.info " exporting with api: #{space_sdk.api_url}" +$logger.info " - exporting configuration data (Kapps,forms, etc)" space_sdk.export_space # cleanup properties that should not be committed with export @@ -436,10 +413,10 @@ def ValidatePWD(file, vars) end # export submissions -logger.info "Exporting and writing submission data" +$logger.info "Exporting and writing submission data" (SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| is_datastore = item["datastore"] || false - logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" + $logger.info "Exporting - #{is_datastore ? 'datastore' : 'kapp'} form #{item['formSlug']}" # build directory to write files to submission_path = is_datastore ? "#{core_path}/space/datastore/forms/#{item['formSlug']}" : @@ -507,16 +484,16 @@ def ValidatePWD(file, vars) # close the submissions file file.close() end -logger.info " - submission data export complete" +$logger.info " - submission data export complete" # ------------------------------------------------------------------------------ # task # ------------------------------------------------------------------------------ -logger.info "Removing files and folders from the existing \"#{template_name}\" template." +$logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{task_path}/*") -logger.info "Exporting the task components for the \"#{template_name}\" template." 
-logger.info " exporting with api: #{task_sdk.api_url}" +$logger.info "Exporting the task components for the \"#{template_name}\" template." +$logger.info " exporting with api: #{task_sdk.api_url}" # export all sources, trees, routines, handlers, # groups, policy rules, categories, and access keys @@ -538,4 +515,4 @@ def ValidatePWD(file, vars) # complete # ------------------------------------------------------------------------------ -logger.info "Finished exporting the \"#{template_name}\" template." +$logger.info "Finished exporting the \"#{template_name}\" template." diff --git a/import.rb b/import.rb index 205af6b..84f91bd 100644 --- a/import.rb +++ b/import.rb @@ -8,6 +8,7 @@ # Teams are not deleted from destination. It could be too dangerous to delete them. # TODO +#Have better validation/notification if you cannot connect (Certificate issue) # RUNNING THE SCRIPT: # ruby import_script.rb -c "<>" @@ -47,6 +48,8 @@ require 'base64' #For pwd encoding template_name = "platform-template" +$pwdFields = ["core","task"] + logger = Logger.new(STDERR) logger.level = Logger::INFO @@ -226,11 +229,6 @@ def SecurePWD(file,vars,pwdAttribute) #Decode password to utilize def DecodePWD(file, vars, pwdLoc) pwdAttribute = vars[pwdLoc]["service_user_password"] - # if !pwdAttribute.is_a?(String) || Base64.strict_encode64(Base64.decode64(pwdAttribute)) != pwdAttribute || pwdAttribute === "" - # puts "Adjusting password for #{pwdLoc}" - # SecurePWD(file, vars, pwdLoc) - # pwdAttribute = vars[pwdLoc]["service_user_password"] - # end return Base64.decode64(pwdAttribute) end From f4181c496b22b3a9dc7af865b02004f5b2893a40 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Wed, 22 May 2024 13:35:11 -0500 Subject: [PATCH 14/19] Updated notes and gitignore --- .gitignore | 3 ++- Gemfile.lock | 6 +++--- export.rb | 4 ++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index a6af917..c9c8b10 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ 
config/*.yml !config/servername_environment_export_specific_config.yml exports/* Local_Gems/* -Tools/* \ No newline at end of file +Tools/* +GrabNGoBundle/* \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 6d9930f..1f13b30 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,13 @@ GEM remote: https://rubygems.org/ specs: - kinetic_sdk (5.0.23) + kinetic_sdk (5.0.26) mime-types (>= 3.3.1) multipart-post (= 2.0.0) slugify (= 1.0.7) mime-types (3.5.2) mime-types-data (~> 3.2015) - mime-types-data (3.2024.0206) + mime-types-data (3.2024.0305) multipart-post (2.0.0) slugify (1.0.7) @@ -17,7 +17,7 @@ PLATFORMS x64-mingw32 DEPENDENCIES - kinetic_sdk (= 5.0.23) + kinetic_sdk (= 5.0.26) BUNDLED WITH 2.2.5 diff --git a/export.rb b/export.rb index d0c39f9..0ff65e5 100644 --- a/export.rb +++ b/export.rb @@ -22,6 +22,9 @@ SUBMISSIONS_TO_EXPORT: - datastore: true formSlug: + - datastore: false + kappSlug: + formSlug: REMOVE_DATA_PROPERTIES: - createdAt @@ -51,6 +54,7 @@ require 'json' require 'optparse' #For argument parsing require 'kinetic_sdk' +# Note you may need to run "Gem install Kinetic_sdk" require 'Find' #For config list building require 'io/console' #For password request require 'base64' #For pwd encoding From b74526baad97407be44f7268fe023f6c5cd6e6ab Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Tue, 15 Oct 2024 13:41:26 -0500 Subject: [PATCH 15/19] Find gem added to gemfile --- Gemfile | 1 + config/sync.psd1 | 10 ---------- config/sync_spaces.ps1 | 45 ------------------------------------------ export.rb | 3 +-- import.rb | 2 +- 5 files changed, 3 insertions(+), 58 deletions(-) delete mode 100644 config/sync.psd1 delete mode 100644 config/sync_spaces.ps1 diff --git a/Gemfile b/Gemfile index 74bdc1a..45ff9b3 100644 --- a/Gemfile +++ b/Gemfile @@ -1,3 +1,4 @@ source 'https://rubygems.org' gem 'kinetic_sdk', '5.0.26' +gem 'Find', '0.1.1' \ No newline at end of file diff --git a/config/sync.psd1 b/config/sync.psd1 deleted file mode 100644 
index e38b590..0000000 --- a/config/sync.psd1 +++ /dev/null @@ -1,10 +0,0 @@ -@{ - Core = @{ - OLDSPACE_SLUG = "bluestone-dev" - NEWSPACE_SLUG = "dmp-kd-test" - } - http_options = @{ - log_level = 'info' - log_output = 'stderr' - } -} diff --git a/config/sync_spaces.ps1 b/config/sync_spaces.ps1 deleted file mode 100644 index 57caa3e..0000000 --- a/config/sync_spaces.ps1 +++ /dev/null @@ -1,45 +0,0 @@ - - - -$Config = Import-LocalizedData -BaseDirectory 'C:\Users\travis.wiese\Source\repos\platform-template\config\' -FileName 'sync.psd1' - -$CURRENT_DIR = (&{If([string]::isnullorempty($PSScriptRoot)) {$pwd.path} else {$PSScriptRoot}}) - - -$OLDSPACE_SLUG = $config['core'].OLDSPACE_SLUG -$NEWSPACE_SLUG = $config['core'].NEWSPACE_SLUG - -$OLDSPACE_PATH = $CURRENT_DIR + '\exports\' + $OLDSPACE_SLUG -$NEWSPACE_PATH = $CURRENT_DIR + '\exports\' + $NEWSPACE_SLUG - -$OLDSPACE_Items = Get-ChildItem $OLDSPACE_PATH -Recurse -$NEWSPACE_Items = Get-ChildItem $NEWSPACE_PATH -Recurse - -$Missing_Files = compare-object -ReferenceObject $OLDSPACE_Items -DifferenceObject $NEWSPACE_Items -ExcludeDifferent -IncludeEqual - - -$NewArr = @() -Foreach($obj in $OLDSPACE_Items) { - $OLDRelPath = $obj.Directoryname.replace("$CURRENT_DIR\exports\$oldspace_slug\",'') - $New_Items = $NEWSPACE_Items | where-object {$_.Directoryname -like "*$OLDRelPath" -and $_.name -eq $obj.name} - try{ - $FolderDifference = Compare-object -ReferenceObject $obj -DifferenceObject $New_Items -Property name - }catch{ - continue - } - - if([string]::isnullorempty($FolderDifference)){ - #Folders match - continue - } - - <# This tracks all unequal folder items #> - $newarr += [pscustomobject] @{ - dir = $OLDRelPath - OldFiles = $obj - NewFiles = $New_Items - } - - - -} \ No newline at end of file diff --git a/export.rb b/export.rb index 0ff65e5..809b82b 100644 --- a/export.rb +++ b/export.rb @@ -53,8 +53,7 @@ require 'logger' #For System Logging require 'json' require 'optparse' #For argument parsing -require 
'kinetic_sdk' -# Note you may need to run "Gem install Kinetic_sdk" +require 'kinetic_sdk' # Note you may need to run "Gem install Kinetic_sdk" require 'Find' #For config list building require 'io/console' #For password request require 'base64' #For pwd encoding diff --git a/import.rb b/import.rb index 84f91bd..a22243b 100644 --- a/import.rb +++ b/import.rb @@ -43,7 +43,7 @@ require 'optparse' #For argument parsing require 'kinetic_sdk' require 'Find' #For config list building -include REXML +require 'REXML' require 'io/console' #For password request require 'base64' #For pwd encoding From 02c81a1e5b48607ab2a8a3a4798abd59159625ac Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Mon, 2 Dec 2024 14:45:47 -0600 Subject: [PATCH 16/19] Basic fix for import around trees --- export.rb | 7 +++++- import.rb | 68 ++++++++++++++++++++++++++++++++++--------------------- 2 files changed, 48 insertions(+), 27 deletions(-) diff --git a/export.rb b/export.rb index 809b82b..b3539ed 100644 --- a/export.rb +++ b/export.rb @@ -368,7 +368,7 @@ def ValidatePWD(file, vars) # core # ------------------------------------------------------------------------------ - +##Clear old folder/files $logger.info "Removing files and folders from the existing \"#{template_name}\" template." FileUtils.rm_rf Dir.glob("#{core_path}/*") @@ -415,6 +415,7 @@ def ValidatePWD(file, vars) File.open(filename, 'w') { |file| file.write(JSON.pretty_generate(model)) } end +#TODO - Flag for submissions to export # export submissions $logger.info "Exporting and writing submission data" (SUBMISSIONS_TO_EXPORT || []).delete_if{ |item| item["kappSlug"].nil?}.each do |item| @@ -507,6 +508,8 @@ def ValidatePWD(file, vars) end end +#Is above tied to below? 
+#TODO - Add flags/logic to have ability to be selective on what's exported/imported task_sdk.export_routines() task_sdk.export_handlers() task_sdk.export_groups() @@ -519,3 +522,5 @@ def ValidatePWD(file, vars) # ------------------------------------------------------------------------------ $logger.info "Finished exporting the \"#{template_name}\" template." + + diff --git a/import.rb b/import.rb index a22243b..1adb2ab 100644 --- a/import.rb +++ b/import.rb @@ -273,8 +273,10 @@ def ValidatePWD(file, vars) Dir.mkdir(File.join(platform_template_path, "exports")) end -#Setting core paths utilzing variables -if !vars['core']['space_slug'].nil? +#Setting core paths utilzing variables - Check old_space_slug -> space_slug -> space_name +if !vars['core']['old_space_slug'].nil? + folderName = vars['core']['old_space_slug'] +elsif !vars['core']['space_slug'].nil? folderName = vars['core']['space_slug'] elsif !vars['core']['space_name'].nil? folderName = vars['core']['space_name'] @@ -1028,31 +1030,45 @@ def ValidatePWD(file, vars) } # identify Routines in source data -sourceTrees = [] -Dir["#{task_path}/routines/*.xml"].each {|routine| - doc = Document.new(File.new(routine)) - root = doc.root - sourceTrees.push("#{root.elements["taskTree/name"].text}") -} -# identify trees in source data -Dir["#{task_path}/sources/*"].each {|source| - if File.directory? 
source - Dir["#{source}/trees/*.xml"].each { |tree| - doc = Document.new(File.new(tree)) - root = doc.root - tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" - sourceTrees.push(tree) - } - end -} +begin + sourceTrees = [] + Dir["#{task_path}/routines/*.xml"].each {|routine| + doc = REXML::Document.new(File.new(routine)) + root = doc.root + sourceTrees.push("#{root.elements["taskTree/name"].text}") + } +rescue + logger.error "Error while identifying routines" +end + +begin + # identify trees in source data + Dir["#{task_path}/sources/*"].each {|source| + if File.directory? source + Dir["#{source}/trees/*.xml"].each { |tree| + doc = REXML::Document.new(File.new(tree)) + root = doc.root + tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" + sourceTrees.push(tree) + } + end + } +rescue + logger.error "Error identifying trees" +end + +begin + # Delete the extra tress and routines on the source + destinationtrees.each { | tree | + if vars["options"]["delete"] && !sourceTrees.include?(tree) + treeDef = tree.split(' :: ') + task_sdk.delete_tree( tree ) + end + } +rescue + logger.error "Error deleting extra trees/routines on source" +end -# Delete the extra tress and routines on the source -destinationtrees.each { | tree | - if vars["options"]["delete"] && !sourceTrees.include?(tree) - treeDef = tree.split(' :: ') - task_sdk.delete_tree( tree ) - end -} # Import v6 workflows as these are not not the same as Trees and Routines logger.info "Importing workflows" From 9ebb225ee4858e527f2a8e968290277377cefb13 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Wed, 16 Apr 2025 18:16:40 -0500 Subject: [PATCH 17/19] Confirmation countdown for export, space added to import conf --- Gemfile | 9 ++++++++- export.rb | 10 ++++++++++ import.rb | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/Gemfile b/Gemfile index 
45ff9b3..eb459a6 100644 --- a/Gemfile +++ b/Gemfile @@ -1,4 +1,11 @@ source 'https://rubygems.org' gem 'kinetic_sdk', '5.0.26' -gem 'Find', '0.1.1' \ No newline at end of file +gem 'Find', '0.1.1' +gem 'logger', '1.4.2' +gem 'json', '2.3.0' +gem 'rexml/document' +gem 'optparse', '0.6.0' +gem 'rexml', '3.2.3.1' +gem 'io/console', '0.5.6' +gem 'base64', '0.2.0' \ No newline at end of file diff --git a/export.rb b/export.rb index b3539ed..f4150b5 100644 --- a/export.rb +++ b/export.rb @@ -310,6 +310,16 @@ def ValidatePWD(file, vars) core_path = File.join(platform_template_path, "exports", folderName, "core") task_path = File.join(platform_template_path, "exports", folderName, "task") + +#Confirmation of space +puts "Exporting #{vars['core']['space_name']} to #{folderName} in 5 seconds..." +4.downto(1) do |n| + puts n + sleep(1) +end + + + # Output the yml file config $logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" diff --git a/import.rb b/import.rb index 1adb2ab..3c4f83a 100644 --- a/import.rb +++ b/import.rb @@ -301,7 +301,7 @@ def ValidatePWD(file, vars) options: http_options.merge({ export_directory: "#{core_path}" }) }) -puts "Are you sure you want to perform an import of data to #{vars["core"]["server_url"]}? [Y/N]" +puts "Are you sure you want to perform an import of data from #{folderName} to #{vars["core"]["server_url"]}? 
[Y/N]" STDOUT.flush case (gets.downcase.chomp) when 'y' From a5a9e3c0c032b3addffec208fe14ba5a6e103d67 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Mon, 5 Jan 2026 14:38:51 -0600 Subject: [PATCH 18/19] hotfix to export 1001+ submissions --- export.rb | 116 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 71 insertions(+), 45 deletions(-) diff --git a/export.rb b/export.rb index f4150b5..ae64fa0 100644 --- a/export.rb +++ b/export.rb @@ -451,52 +451,80 @@ def ValidatePWD(file, vars) # create folder to write submission data to FileUtils.mkdir_p(submission_path, :mode => 0700) + + processed_submissions = false + createdAt = nil + # Iterate submissions in case over 1000 exist + while !processed_submissions do + # build params to pass to the retrieve_form_submissions method + params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} + if !createdAt.nil? + params["q"] = "createdAt>=\"#{createdAt}\"" + end + # open the submissions file in write mode + file = File.open("#{submission_path}/submissions.ndjson", 'w'); - # build params to pass to the retrieve_form_submissions method - params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} - - # open the submissions file in write mode - file = File.open("#{submission_path}/submissions.ndjson", 'w'); - - # ensure the file is empty - file.truncate(0) - response = nil - begin - # get submissions from datastore form or form - response = is_datastore ? 
- space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : - space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content - if response.has_key?("submissions") - # iterate over each submission - (response["submissions"] || []).each do |submission| - # write each attachment to a a dir - submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| - submission_id = submission['id'] - # define the dir to contain the attahment - download_dir = "#{submission_path}/#{submission_id}/#{field}" - # evaluate fields with multiple attachments - value.map.with_index{ | attachment, index | - # create folder to write attachment - FileUtils.mkdir_p(download_dir, :mode => 0700) - # dir and file name to write attachment - download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" - # url to retrieve the attachment - url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" - # retrieve and write attachment - space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) - # add the "path" key to indicate the attachment's location - attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" + # ensure the file is empty + file.truncate(0) + response = nil + begin + # get submissions from datastore form or form + response = is_datastore ? 
+ space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : + space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content + if response.has_key?("submissions") + # File.write("outputtest.txt","#{response}") + # exit + # iterate over each submission + (response["submissions"] || []).each do |submission| + # write each attachment to a a dir + submission['values'].select{ |field, value| attachement_files.include?(field)}.each{ |field,value| + submission_id = submission['id'] + # define the dir to contain the attahment + download_dir = "#{submission_path}/#{submission_id}/#{field}" + # evaluate fields with multiple attachments + value.map.with_index{ | attachment, index | + # create folder to write attachment + FileUtils.mkdir_p(download_dir, :mode => 0700) + # dir and file name to write attachment + download_path = "#{download_dir}/#{File.join(".", attachment['name'])}" + # url to retrieve the attachment + url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" + # retrieve and write attachment + space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) + # add the "path" key to indicate the attachment's location + attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" + } } - } - # append each submission (removing the submission unwanted attributes) - file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + # append each submission (removing the submission unwanted attributes) + file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + end end + params['pageToken'] = response['nextPageToken'] + # get next page of submissions if there are more + end while !response.nil? && !response['nextPageToken'].nil? 
+ # close the submissions file + file.close() + # $logger.info "Subs" + + if response["submissions"].count == 1000 + #Check if another batch exists + createdAt = (response["submissions"].last)["createdAt"] + # $logger.info "LastSub: #{response["submissions"].last}" + $logger.info "New created at #{createdAt}" + # $logger.info "Source: #{response["submissions"].last}" + # $logger.info "---------------" + # $logger.info "All #{response["submissions"]}" + # $logger.info "---------------" + # $logger.info "#{response["submissions"].count}" + else + #If not, exit loop + $logger.info "Exiting submission loop" + processed_submissions = true + createdAt = nil end - params['pageToken'] = response['nextPageToken'] - # get next page of submissions if there are more - end while !response.nil? && !response['nextPageToken'].nil? - # close the submissions file - file.close() + + end end $logger.info " - submission data export complete" @@ -531,6 +559,4 @@ def ValidatePWD(file, vars) # complete # ------------------------------------------------------------------------------ -$logger.info "Finished exporting the \"#{template_name}\" template." - - +$logger.info "Finished exporting the \"#{template_name}\" template." 
\ No newline at end of file From 61ca5a40fdd495dd842ff9ce24bfb20b75a2a728 Mon Sep 17 00:00:00 2001 From: Travis Wiese Date: Wed, 4 Feb 2026 10:56:53 -0600 Subject: [PATCH 19/19] Looping submissions for export, comparison logic for import pending, threading --- export.rb | 89 +-- import.rb | 1914 +++++++++++++++++++++++++++++------------------------ 2 files changed, 1106 insertions(+), 897 deletions(-) diff --git a/export.rb b/export.rb index ae64fa0..d87ea72 100644 --- a/export.rb +++ b/export.rb @@ -333,7 +333,7 @@ def ValidatePWD(file, vars) options: http_options.merge({ export_directory: "#{core_path}" }) }) -task_sdk = KineticSdk::Task.new({ +$task_sdk = KineticSdk::Task.new({ app_server_url: "#{vars["task"]["server_url"]}", username: vars["task"]["service_user_username"], password: vars["task"]["service_user_password"], @@ -346,8 +346,8 @@ def ValidatePWD(file, vars) # Validate Core Connection begin - $logger.info "Validating connection to Core \"#{space_sdk.api_url}\"" - response = space_sdk.me() + $logger.info "Validating connection to Core \"#{$space_sdk.api_url}\"" + response = $space_sdk.me() if response.status == 0 raise response.message elsif response.status.to_s.match(/4\d{2}/) @@ -360,8 +360,8 @@ def ValidatePWD(file, vars) # Validate Task Connection begin - $logger.info "Validating connection to Task \"#{task_sdk.api_url}\"" - response = task_sdk.environment() + $logger.info "Validating connection to Task \"#{$task_sdk.api_url}\"" + response = $task_sdk.environment() if response.status == 0 raise response.message elsif response.status.to_s.match(/4\d{2}/) @@ -386,9 +386,9 @@ def ValidatePWD(file, vars) # fetch export from core service and write to export directory $logger.info "Exporting the core components for the \"#{template_name}\" template." 
-$logger.info " exporting with api: #{space_sdk.api_url}" +$logger.info " exporting with api: #{$space_sdk.api_url}" $logger.info " - exporting configuration data (Kapps,forms, etc)" -space_sdk.export_space +$space_sdk.export_space # cleanup properties that should not be committed with export # bridge keys @@ -438,40 +438,45 @@ def ValidatePWD(file, vars) # get attachment fields from form definition attachment_form = is_datastore ? - space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : - space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) + $space_sdk.find_datastore_form(item['formSlug'], {"include" => "fields.details"}) : + $space_sdk.find_form(item['kappSlug'], item['formSlug'], {"include" => "fields.details"}) # get attachment fields from form definition attachement_files = attachment_form.status == 200 ? attachment_form.content['form']['fields'].select{ | file | file['dataType'] == "file" }.map { | field | field['name'] } : {} # set base url for attachments attachment_base_url = is_datastore ? - "#{space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : - "#{space_sdk.api_url.gsub("/app/api/v1", "")}" + "#{$space_sdk.api_url.gsub("/app/api/v1", "")}/app/datastore" : + "#{$space_sdk.api_url.gsub("/app/api/v1", "")}" # create folder to write submission data to FileUtils.mkdir_p(submission_path, :mode => 0700) + + # open the submissions file in write mode + file = File.open("#{submission_path}/submissions.ndjson", 'w'); + # ensure the file is empty + file.truncate(0) + file.close() + file = File.open("#{submission_path}/submissions.ndjson", 'a'); processed_submissions = false - createdAt = nil + createdAt = Time.now + previous = nil + # dataBlock = {} # Iterate submissions in case over 1000 exist - while !processed_submissions do + while !processed_submissions && !createdAt.nil? 
do # build params to pass to the retrieve_form_submissions method params = {"include" => "details,children,origin,parent,values", "limit" => 1000, "direction" => "ASC"} if !createdAt.nil? params["q"] = "createdAt>=\"#{createdAt}\"" end - # open the submissions file in write mode - file = File.open("#{submission_path}/submissions.ndjson", 'w'); - # ensure the file is empty - file.truncate(0) response = nil begin # get submissions from datastore form or form response = is_datastore ? - space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : - space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content + $space_sdk.find_all_form_datastore_submissions(item['formSlug'], params).content : + $space_sdk.find_form_submissions(item['kappSlug'], item['formSlug'], params).content if response.has_key?("submissions") # File.write("outputtest.txt","#{response}") # exit @@ -491,40 +496,44 @@ def ValidatePWD(file, vars) # url to retrieve the attachment url = "#{attachment_base_url}/submissions/#{submission_id}/files/#{ERB::Util.url_encode(field)}/#{index}/#{ERB::Util.url_encode(attachment['name'])}" # retrieve and write attachment - space_sdk.stream_download_to_file(download_path, url, {}, space_sdk.default_headers) + $space_sdk.stream_download_to_file(download_path, url, {}, $space_sdk.default_headers) # add the "path" key to indicate the attachment's location attachment['path'] = "/#{submission_id}/#{field}/#{attachment['name']}" } } # append each submission (removing the submission unwanted attributes) - file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + # dataBlock = dataBlock.merge(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) + json_string = JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)}) + unless json_string == previous + file.puts(json_string) + previous = json_string + end + # 
file.puts(JSON.generate(submission.delete_if { |key, value| REMOVE_DATA_PROPERTIES.member?(key)})) end end params['pageToken'] = response['nextPageToken'] # get next page of submissions if there are more end while !response.nil? && !response['nextPageToken'].nil? # close the submissions file - file.close() + # file.close() # $logger.info "Subs" if response["submissions"].count == 1000 #Check if another batch exists createdAt = (response["submissions"].last)["createdAt"] # $logger.info "LastSub: #{response["submissions"].last}" - $logger.info "New created at #{createdAt}" - # $logger.info "Source: #{response["submissions"].last}" - # $logger.info "---------------" - # $logger.info "All #{response["submissions"]}" - # $logger.info "---------------" - # $logger.info "#{response["submissions"].count}" + $logger.debug "New created at #{createdAt}" else #If not, exit loop - $logger.info "Exiting submission loop" + $logger.debug "Exiting submission loop" processed_submissions = true createdAt = nil end end + file.close() + #Write to file + # file.puts(dataBlock) end $logger.info " - submission data export complete" @@ -535,25 +544,25 @@ def ValidatePWD(file, vars) FileUtils.rm_rf Dir.glob("#{task_path}/*") $logger.info "Exporting the task components for the \"#{template_name}\" template." -$logger.info " exporting with api: #{task_sdk.api_url}" +$logger.info " exporting with api: #{$task_sdk.api_url}" # export all sources, trees, routines, handlers, # groups, policy rules, categories, and access keys -task_sdk.export_sources() -task_sdk.find_sources().content['sourceRoots'].each do |source| - task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| - task_sdk.export_tree(tree['title']) +$task_sdk.export_sources() +$task_sdk.find_sources().content['sourceRoots'].each do |source| + $task_sdk.find_trees({ "source" => source['name'] }).content['trees'].each do |tree| + $task_sdk.export_tree(tree['title']) end end #Is above tied to below? 
#TODO - Add flags/logic to have ability to be selective on what's exported/imported -task_sdk.export_routines() -task_sdk.export_handlers() -task_sdk.export_groups() -task_sdk.export_policy_rules() -task_sdk.export_categories() -task_sdk.export_access_keys() +$task_sdk.export_routines() +$task_sdk.export_handlers() +$task_sdk.export_groups() +$task_sdk.export_policy_rules() +$task_sdk.export_categories() +$task_sdk.export_access_keys() # ------------------------------------------------------------------------------ # complete diff --git a/import.rb b/import.rb index 3c4f83a..da4527d 100644 --- a/import.rb +++ b/import.rb @@ -36,1046 +36,1246 @@ log_level: info log_output: stderr =end - require 'logger' #For System Logging require 'json' require 'rexml/document' require 'optparse' #For argument parsing -require 'kinetic_sdk' +# require 'kinetic_sdk' require 'Find' #For config list building -require 'REXML' require 'io/console' #For password request require 'base64' #For pwd encoding +require 'concurrent-ruby' -template_name = "platform-template" -$pwdFields = ["core","task"] +$LOAD_PATH.unshift('C:\Users\travis.wiese\Source\repos\kinetic-sdk-rb\lib') +require 'kinetic_sdk' -logger = Logger.new(STDERR) -logger.level = Logger::INFO -logger.formatter = proc do |severity, datetime, progname, msg| - date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") - "[#{date_format}] #{severity}: #{msg}\n" -end -######################################### +def import_space() + template_name = "platform-template" + $pwdFields = ["core","task"] -# Determine the Present Working Directory -pwd = File.expand_path(File.dirname(__FILE__)) -# ARGV << '-h' if ARGV.empty? - -# The options specified on the command line will be collected in *options*. -options = {} -OptionParser.new do |opts| - opts.banner = "Usage: example.rb [options]" - opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| - options["CONFIG_FILE"] = config - end - - # No argument, shows at tail. 
This will print an options summary. - # Try it and see! - opts.on_tail("-h", "--help", "Show this message") do - puts opts - exit + $logger = Logger.new(STDERR) + $logger.level = Logger::INFO + $logger.formatter = proc do |severity, datetime, progname, msg| + date_format = datetime.utc.strftime("%Y-%m-%dT%H:%M:%S.%LZ") + "[#{date_format}] #{severity}: #{msg}\n" end -end.parse! + ######################################### + # Determine the Present Working Directory + pwd = File.expand_path(File.dirname(__FILE__)) -#Configuration Selection -def config_selection(config_folder_path, logger) + # ARGV << '-h' if ARGV.empty? - #Ensure config folder exists - if !File.directory?(config_folder_path) - logger.info "Config folder not found at #{config_folder_path}" - puts "Cannot find config folder!" - puts "Exiting..." - gets - exit - end - - # #Determine Config file to use - config_exts = ['.yaml','.yml'] - configArray = [] - logger.info "Checking #{config_folder_path} for config files" - #Check config folder for yaml/yml files containing the word 'import' - begin - Find.find("#{config_folder_path}/") do |file| - configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) + # The options specified on the command line will be collected in *options*. + options = {} + OptionParser.new do |opts| + opts.banner = "Usage: example.rb [options]" + opts.on("-c", "--c CONFIG_FILE", "The Configuration file to use") do |config| + options["CONFIG_FILE"] = config end - rescue error - #No config files found in config folder - logger.error "Error finding default config file path!" - logger.error "Error reported: #{error}" - puts "Cannot find config files in default path! (#{pwd})" - puts "Exiting script..." 
- gets - exit - end - logger.info "Found config files" - - #Print config file options with number indicators to select - puts "Select your config file" - configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" - end - logger.info "Select section" - begin - print "Selection (0 to repeat options): " - sel = gets.chomp.to_i - begin - if sel === 0 - configArray.each_with_index do |cFile, index| - puts "#{index+1}) #{cFile}" - end - next - end - configFile = configArray[sel-1] - logger.info "Option #{sel} - #{configFile}" - break - rescue - logger.info "Error selecting config file! Exiting..." - puts "Error selecting config file!" - puts "Exiting..." - gets + + # No argument, shows at tail. This will print an options summary. + # Try it and see! + opts.on_tail("-h", "--help", "Show this message") do + puts opts exit end - end while true - return configFile -end - - -#End method + end.parse! -# determine the directory paths -platform_template_path = File.dirname(File.expand_path(__FILE__)) -config_folder_path = File.join(platform_template_path,'config') + max_threads = 10 + $pool = Concurrent::FixedThreadPool.new(max_threads) + $mutex = Mutex.new + kapps_array = [] + kpromises = [] -if options["CONFIG_FILE"].nil? 
- options["CONFIG_FILE"] = config_selection(config_folder_path, logger) -end -# ------------------------------------------------------------------------------ -# methods -# ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# constants -# ------------------------------------------------------------------------------ + #End method + # determine the directory paths + platform_template_path = File.dirname(File.expand_path(__FILE__)) + config_folder_path = File.join(platform_template_path,'config') -# ------------------------------------------------------------------------------ -# setup -# ------------------------------------------------------------------------------ + if options["CONFIG_FILE"].nil? + options["CONFIG_FILE"] = config_selection(config_folder_path) + end -logger.info "Installing gems for the \"#{template_name}\" template." -Dir.chdir(platform_template_path) { system("bundle", "install") } + $logger.info "Installing gems for the \"#{template_name}\" template." + Dir.chdir(platform_template_path) { system("bundle", "install") } -vars = {} -file = "#{platform_template_path}/#{options['CONFIG_FILE']}" + vars = {} + file = "#{platform_template_path}/#{options['CONFIG_FILE']}" -# Check if configuration file exists -logger.info "Validating configuration file." -begin - if File.exist?(file) != true - file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + # Check if configuration file exists + $logger.info "Validating configuration file." + begin if File.exist?(file) != true - raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + file = "#{config_folder_path}/#{options['CONFIG_FILE']}" + if File.exist?(file) != true + raise "The file \"#{options['CONFIG_FILE']}\" does not exist in the base or config directories." + end end + rescue => error + $logger.info error + $logger.info "Exiting..." 
+ exit end -rescue => error - logger.info error - logger.info "Exiting..." - exit -end - -# Read the config file specified in the command line into the variable "vars" -begin - vars.merge!( YAML.load(File.read(file)) ) -rescue => error - logger.info "Error loading YAML configuration" - logger.info error - logger.info "Exiting..." - gets - exit -end -logger.info "Configuration file passed validation." -#Check if nil/unencoded and update accordingly -def SecurePWD(file,vars,pwdAttribute) - #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert - if vars[pwdAttribute]["service_user_password"].nil? - password = IO::console.getpass "Enter Password(#{pwdAttribute}): " - else - password = vars[pwdAttribute]["service_user_password"] - end - enc = Base64.strict_encode64(password) - vars[pwdAttribute]["service_user_password"] = enc.to_s + # Read the config file specified in the command line into the variable "" begin - fileObj = File.open(file, 'w') - puts "Updated pwd in #{pwdAttribute} to #{enc}" - fileObj.write vars.to_yaml - #{ |f| f.write vars.to_yaml } - rescue ArgumentError - logger.error("There was an error while updating variables file:") - logger.error(ArgumentError) - ensure - fileObj.close - end -end - -#Decode password to utilize -def DecodePWD(file, vars, pwdLoc) - pwdAttribute = vars[pwdLoc]["service_user_password"] - return Base64.decode64(pwdAttribute) -end - -#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions -def ValidatePWD(file, vars) - $pwdFields.each do |field| - t = vars[field]["service_user_password"] - #See if not a string, not encoded, or default - if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" - puts "Updating password #{t}" - SecurePWD(file, vars, field) - end + vars.merge!( YAML.load(File.read(file)) ) + rescue => error + $logger.info "Error loading YAML configuration" + $logger.info error + $logger.info "Exiting..." 
+ gets + exit end -end + $logger.info "Configuration file passed validation." -ValidatePWD(file, vars) -#Will confirm there is a valid, encoded password and decode. Otherwise it will prompt/encode pwd and return decoded variant -vars["core"]["service_user_password"] = DecodePWD(file, vars,"core") -vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") -if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? - puts "Core password is blank! Password required. Exiting..." - gets - exit -end -if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? - puts "Task password is blank! Password required. Exiting..." - gets - exit -end + ValidatePWD(file, vars) + #Will confirm there is a valid, encoded password and decode. Otherwise it will prompt/encode pwd and return decoded variant + vars["core"]["service_user_password"] = DecodePWD(file, vars,"core") + vars["task"]["service_user_password"] = DecodePWD(file, vars, "task") + if vars["core"]["service_user_password"].empty? || vars["core"]["service_user_password"].nil? + puts "Core password is blank! Password required. Exiting..." + gets + exit + end + if vars["task"]["service_user_password"].empty? || vars["task"]["service_user_password"].nil? + puts "Task password is blank! Password required. Exiting..." + gets + exit + end -# Set http_options based on values provided in the config file. -http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| - result[k.to_sym] = v -end -#Config exports folder exists, if not then create -if !File.directory?(File.join(platform_template_path,"exports")) - Dir.mkdir(File.join(platform_template_path, "exports")) -end -#Setting core paths utilzing variables - Check old_space_slug -> space_slug -> space_name -if !vars['core']['old_space_slug'].nil? - folderName = vars['core']['old_space_slug'] -elsif !vars['core']['space_slug'].nil? 
- folderName = vars['core']['space_slug'] -elsif !vars['core']['space_name'].nil? - folderName = vars['core']['space_name'] -else - puts "No space slug or name provided! Please provide one in order to export..." - gets - exit -end -core_path = File.join(platform_template_path, "exports", folderName, "core") -task_path = File.join(platform_template_path, "exports", folderName, "task") - -# Output the yml file config -logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" - -logger.info "Setting up the SDK" - -space_sdk = KineticSdk::Core.new({ - space_server_url: vars["core"]["server_url"], - space_slug: vars["core"]["space_slug"], - username: vars["core"]["service_user_username"], - password: vars["core"]["service_user_password"], - options: http_options.merge({ export_directory: "#{core_path}" }) -}) - -puts "Are you sure you want to perform an import of data from #{folderName} to #{vars["core"]["server_url"]}? [Y/N]" -STDOUT.flush -case (gets.downcase.chomp) -when 'y' - puts "Continuing Import" - STDOUT.flush -else - abort "Exiting Import" -end + # Set http_options based on values provided in the config file. 
+ http_options = (vars["http_options"] || {}).each_with_object({}) do |(k,v),result| + result[k.to_sym] = v + end -################################################################### -# ------------------------------------------------------------------------------ -# Update Space Attributes -# ------------------------------------------------------------------------------ + #Config exports folder exists, if not then create + if !File.directory?(File.join(platform_template_path,"exports")) + Dir.mkdir(File.join(platform_template_path, "exports")) + end -sourceSpaceAttributeArray = [] -destinationSpaceAttributeArray = (space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} + #Setting core paths utilizing variables - Check old_space_slug -> space_slug -> space_name + if !vars['core']['old_space_slug'].nil? + folderName = vars['core']['old_space_slug'] + elsif !vars['core']['space_slug'].nil? + folderName = vars['core']['space_slug'] + elsif !vars['core']['space_name'].nil? + folderName = vars['core']['space_name'] + else + puts "No space slug or name provided! Please provide one in order to export..." 
+ gets + exit + end + core_path = File.join(platform_template_path, "exports", folderName, "core") + task_path = File.join(platform_template_path, "exports", folderName, "task") -if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") - spaceAttributeDefinitions = JSON.parse(File.read(file)) + # Output the yml file config + $logger.info "Output of Configuration File: \r #{JSON.pretty_generate(vars)}" - spaceAttributeDefinitions.each { |attribute| - if destinationSpaceAttributeArray.include?(attribute['name']) - space_sdk.update_space_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceSpaceAttributeArray.push(attribute['name']) - } -end + $logger.info "Setting up the SDK" -destinationSpaceAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) - space_sdk.delete_space_attribute_definition(attribute) - end -} - -# ------------------------------------------------------------------------------ -# Update User Attributes -# ------------------------------------------------------------------------------ -sourceUserAttributeArray = [] -destinationUserAttributeArray = (space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { |definition| definition['name']} - -if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") - userAttributeDefinitions = JSON.parse(File.read(file)) - userAttributeDefinitions.each { |attribute| - if destinationUserAttributeArray.include?(attribute['name']) - space_sdk.update_user_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserAttributeArray.push(attribute['name']) - } -end + $space_sdk = KineticSdk::Core.new({ + space_server_url: 
vars["core"]["server_url"], + space_slug: vars["core"]["space_slug"], + username: vars["core"]["service_user_username"], + password: vars["core"]["service_user_password"], + options: http_options.merge({ export_directory: "#{core_path}" }) + }) -destinationUserAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) - space_sdk.delete_user_attribute_definition(attribute) + puts "Are you sure you want to perform an import of data from #{folderName} to #{vars["core"]["server_url"]}? [Y/N]" + STDOUT.flush + case (gets.downcase.chomp) + when 'y' + puts "Continuing Import" + STDOUT.flush + else + abort "Exiting Import" end -} -# ------------------------------------------------------------------------------ -# Update User Profile Attributes -# ------------------------------------------------------------------------------ -sourceUserProfileAttributeArray = [] -destinationUserProfileAttributeArray = (space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} + -if File.file?(file = "#{core_path}/space/userProfileAttributeDefinitions.json") - userProfileAttributeDefinitions = JSON.parse(File.read(file)) + import_bridge_models(core_path,vars) - userProfileAttributeDefinitions.each { |attribute| - if destinationUserProfileAttributeArray.include?(attribute['name']) - space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceUserProfileAttributeArray.push(attribute['name']) - } -end + # ------------------------------------------------------------------------------ + # delete bridge models + # Delete any Bridges from the destination which are missing from the import data + # ------------------------------------------------------------------------------ + 
import_space_web_apis(core_path) -destinationUserProfileAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) - space_sdk.delete_user_profile_attribute_definition(attribute) - end -} + # ------------------------------------------------------------------------------ + # delete space teams + # TODO: A method doesn't exist for deleting the team + # ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # import kapp data + # ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# Update Team Attributes -# ------------------------------------------------------------------------------ + Dir["#{core_path}/space/kapps/*"].each { |file| + kpromises << Concurrent::Promise.execute(executor: $pool) do + begin + kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? 
File::SEPARATOR : x}.last.gsub('.json','') + next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration + kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur + kapp = {} + kapp['slug'] = kapp_slug # set kapp_slug + + if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file + kapp = JSON.parse( File.read(file) ) + kappExists = $space_sdk.find_kapp(kapp['slug']).code.to_i == 200 + if kappExists + $space_sdk.update_kapp(kapp['slug'], kapp) + else + $space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) + end + end + + + import_kapp_attribute_definitions(core_path, kapp) + import_kapp_form_attribute_definitions(core_path,kapp) + import_kapp_form_type_definitions(core_path,kapp) + + import_kapp_security_policy_definitions(core_path, kapp) + + # ------------------------------------------------------------------------------ + # Migrate Kapp Categories + # ------------------------------------------------------------------------------ + import_kapp_categories(core_path) + + + + # ------------------------------------------------------------------------------ + # import space webhooks + # ------------------------------------------------------------------------------ + sourceSpaceWebhooksArray = [] + destinationSpaceWebhooksArray = ($space_sdk.find_webhooks_on_space({"include"=>"details"}).content['webhooks'] || {}).map{ |webhook| {"name" => webhook['name'], "updatedAt"=>webhook['updatedAt']} } + + Dir["#{core_path}/space/webhooks/*.json"].each{ |file| + webhook = JSON.parse(File.read(file)) + destinationWebhook = destinationSpaceWebhooksArray.find {|destination_webhook| destination_webhook['name'] == webhook['name']} + if destinationSpaceWebhooksArray.include?(webhook['name']) + + $space_sdk.update_webhook_on_space(webhook['name'], webhook) + elsif + 
$space_sdk.add_webhook_on_space(webhook) + end + sourceSpaceWebhooksArray.push(webhook['name']) + } -sourceTeamAttributeArray = [] -destinationTeamAttributeArray = (space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} + # ------------------------------------------------------------------------------ + # delete space webhooks + # TODO: A method doesn't exist for deleting the webhook + # ------------------------------------------------------------------------------ -if File.file?(file = "#{core_path}/space/teamAttributeDefinitions.json") - teamAttributeDefinitions = JSON.parse(File.read(file)) - teamAttributeDefinitions.each { |attribute| - if destinationTeamAttributeArray.include?(attribute['name']) - space_sdk.update_team_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_team_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + destinationSpaceWebhooksArray.each do |webhook| + if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) + $space_sdk.delete_webhook_on_space(webhook) + end + end + + # ------------------------------------------------------------------------------ + # Migrate Kapp Webhooks + # ------------------------------------------------------------------------------ + sourceWebhookArray = [] + webhooks_on_kapp = $space_sdk.find_webhooks_on_kapp(kapp['slug']) + + if webhooks_on_kapp.code=="200" + destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| + webhookDef = JSON.parse(File.read(webhookFile)) + if destinationWebhookArray.include?(webhookDef['name']) + $space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) + else + $space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) + end + sourceWebhookArray.push(webhookDef['name']) + } + 
+ # ------------------------------------------------------------------------------ + # Delete Kapp Webhooks + # ------------------------------------------------------------------------------ + destinationWebhookArray.each { | attribute | + if vars["options"]["delete"] && !sourceWebhookArray.include?(attribute) + $space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) + end + } + end + + + + import_forms(core_path,kapp,vars) + + + ##TODO - Convert to csv upload + ## PATCH https://playground-travis-wiese.kinopsdev.io/app/api/v1/kapps/kapp1/forms/f1/submissions?import + ## + + # ------------------------------------------------------------------------------ + # Import Kapp Form Data + # ------------------------------------------------------------------------------ + + import_kapp_form_data(core_path,kapp) + import_kapp_web_apis(core_path,kapp) + + # ------------------------------------------------------------------------------ + # Delete Kapp Web APIs + # ------------------------------------------------------------------------------ + destinationWebApisArray.each { | webApi | + if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) + $space_sdk.delete_kapp_webapi(kapp['slug'], webApi) + end + } + rescue end - sourceTeamAttributeArray.push(attribute['name']) - } -end + end -destinationTeamAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) - space_sdk.delete_team_attribute_definition(attribute) - end -} + } + kpromises.each(&:wait!) 
+ + #End Kapp loop -# ------------------------------------------------------------------------------ -# Update Datastore Attributes -# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # task + # ------------------------------------------------------------------------------ -sourceDatastoreAttributeArray = [] -destinationDatastoreAttributeArray =(space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} + $task_sdk = KineticSdk::Task.new({ + app_server_url: "#{vars["task"]["server_url"]}", + username: vars["task"]["service_user_username"], + password: vars["task"]["service_user_password"], + options: http_options.merge({ export_directory: "#{task_path}" }) + }) -if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") - datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) - datastoreFormAttributeDefinitions.each { |attribute| - if destinationDatastoreAttributeArray.include?(attribute['name']) - space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) - else - space_sdk.add_datastore_form_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceDatastoreAttributeArray.push(attribute['name']) - } -end + # ------------------------------------------------------------------------------ + # task import + # ------------------------------------------------------------------------------ -destinationDatastoreAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) - #Delete form is disabled - #space_sdk.delete_datastore_form_attribute_definition(attribute) - end -} + $logger.info "Importing the task components for the \"#{template_name}\" template." 
+ $logger.info " importing with api: #{$task_sdk.api_url}" + # ------------------------------------------------------------------------------ + # task handlers + # ------------------------------------------------------------------------------ -# ------------------------------------------------------------------------------ -# Update Security Policy -# ------------------------------------------------------------------------------ + # import handlers forcing overwrite + $task_sdk.import_handlers_threaded(true) -sourceSecurityPolicyArray = [] -destinationSecurityPolicyArray = (space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + # ------------------------------------------------------------------------------ + # Import Task Trees and Routines + # ------------------------------------------------------------------------------ -if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") - securityPolicyDefinitions = JSON.parse(File.read(file)) - securityPolicyDefinitions.each { |attribute| - if destinationSecurityPolicyArray.include?(attribute['name']) - space_sdk.update_space_security_policy_definition(attribute['name'], attribute) - else - space_sdk.add_space_security_policy_definition(attribute) - end - sourceSecurityPolicyArray.push(attribute['name']) - } -end + # import routines and force overwrite + $task_sdk.import_routines_threaded(true) + # import trees and force overwrite + $task_sdk.import_trees_threaded(true) -destinationSecurityPolicyArray.each { | attribute | - if vars["options"]["delete"] && !sourceSecurityPolicyArray.include?(attribute) - space_sdk.delete_space_security_policy_definition(attribute) - end -} -# ------------------------------------------------------------------------------ -# import bridge models -# *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. 
-# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # import task categories + # ------------------------------------------------------------------------------ -destinationModels = space_sdk.find_bridge_models() -destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} + sourceCategories = [] #From import data + destinationCategories = ($task_sdk.find_categories().content['categories'] || {}).map{ |category| {'category'=>category['name'],'updatedAt'=>category['updatedAt']}} -Dir["#{core_path}/space/models/*.json"].each{ |model| - body = JSON.parse(File.read(model)) - if destinationModels_Array.include?(body['name']) - space_sdk.update_bridge_model(body['name'], body) - elsif - space_sdk.add_bridge_model(body) - end -} + #TODO - No updatedAt in category file + Dir["#{task_path}/categories/*.json"].each { |file| + category = JSON.parse(File.read(file)) -# ------------------------------------------------------------------------------ -# delete bridge models -# Delete any Bridges from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -SourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } + sourceCategories.push(category['name']) -destinationModels_Array.each do |model| - if vars["options"]["delete"] && !SourceModelsArray.include?(model) - space_sdk.delete_bridge_model(model) - end -end + if destinationCategories.include?(category['name']) + $task_sdk.update_category(category['name'], category) + else + $task_sdk.add_category(category) + end + } -# ------------------------------------------------------------------------------ -# Import Space Web APIs -# ------------------------------------------------------------------------------ + # 
------------------------------------------------------------------------------ + # delete task categories + # ------------------------------------------------------------------------------ -sourceSpaceWebApisArray = [] -destinationSpaceWebApisArray = (space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} + destinationCategories.each { |category| + if vars["options"]["delete"] && !sourceCategories.include?(category) + $task_sdk.delete_category(category) + end + } - -Dir["#{core_path}/space/webApis/*"].each{ |file| - body = JSON.parse(File.read(file)) - if destinationSpaceWebApisArray.include?(body['slug']) - space_sdk.update_space_webapi(body['slug'], body) - else - space_sdk.add_space_webapi(body) - end - sourceSpaceWebApisArray.push(body['slug']) -} - -# ------------------------------------------------------------------------------ -# Delete Space Web APIs -# Delete any Web APIs from the destination which are missing from the import data -# ------------------------------------------------------------------------------ -destinationSpaceWebApisArray.each { | webApi | - if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) - space_sdk.delete_space_webapi(webApi) - end -} - -# ------------------------------------------------------------------------------ -# import datastore forms -# ------------------------------------------------------------------------------ -destinationDatastoreForms = [] #From destination server -sourceDatastoreForms = [] #From import data - -logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" - - destinationDatastoreForms = (space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} - Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| - body = JSON.parse(File.read(datastore)) - sourceDatastoreForms.push(body['slug']) - if destinationDatastoreForms.include?(body['slug']) - 
space_sdk.update_datastore_form(body['slug'], body) + # ------------------------------------------------------------------------------ + # import task policy rules + # ------------------------------------------------------------------------------ + + destinationPolicyRuleArray = $task_sdk.find_policy_rules().content['policyRules'] + sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| + rule = JSON.parse(File.read(file)) + {"name" => rule['name'], "type" => rule['type']} + } + + Dir["#{task_path}/policyRules/*.json"].each { |file| + rule = JSON.parse(File.read(file)) + if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? + $task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) else - space_sdk.add_datastore_form(body) + $task_sdk.add_policy_rule(rule) end } -# ------------------------------------------------------------------------------ -# delete datastore forms -# Delete any form from the destination which are missing from the import data -# ------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # delete task policy rules + # ------------------------------------------------------------------------------ + destinationPolicyRuleArray.each { |rule| + if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? 
+ $task_sdk.delete_policy_rule(rule) + end + } + # ------------------------------------------------------------------------------ + # Delete Trees and Routines not in the Source Data + # ------------------------------------------------------------------------------ -destinationDatastoreForms.each { |datastore_slug| - if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) - space_sdk.delete_datastore_form(datastore_slug) - end -} - -# ------------------------------------------------------------------------------ -# Import Datastore Data -# ------------------------------------------------------------------------------ -Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - (space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| - space_sdk.delete_datastore_submission(submission['id']) + # identify Trees and Routines on destination + destinationtrees = [] + trees = $task_sdk.find_trees().content + (trees['trees'] || []).each { |tree| + destinationtrees.push( tree['title'] ) } - File.readlines(filename).each { |line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } - end + + # identify Routines in source data + begin + sourceTrees = [] + Dir["#{task_path}/routines/*.xml"].each {|routine| + doc = REXML::Document.new(File.new(routine)) + root = doc.root + sourceTrees.push("#{root.elements["taskTree/name"].text}") } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] + rescue + $logger.error "Error while identifying routines" + end + + begin + # identify trees in source data + Dir["#{task_path}/sources/*"].each {|source| + if File.directory? source + Dir["#{source}/trees/*.xml"].each { |tree| + doc = REXML::Document.new(File.new(tree)) + root = doc.root + tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" + sourceTrees.push(tree) + } + end } - space_sdk.add_datastore_submission(form_slug, body).content - } -} - -# ------------------------------------------------------------------------------ -# import space teams -# ------------------------------------------------------------------------------ -if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 - SourceTeamArray = [] - destinationTeamsArray = (space_sdk.find_teams().content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name']} } - teams.each{ |team| - body = JSON.parse(File.read(team)) - if !destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug'] }.nil? 
- space_sdk.update_team(body['slug'], body) - else - space_sdk.add_team(body) - end - #Add Attributes to the Team - (body['attributes'] || []).each{ | attribute | - space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) + rescue + $logger.error "Error identifying trees" + end + + begin + # Delete the extra trees and routines on the source + destinationtrees.each { | tree | + if vars["options"]["delete"] && !sourceTrees.include?(tree) + treeDef = tree.split(' :: ') + $task_sdk.delete_tree( tree ) + end } - SourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) - } + rescue + $logger.error "Error deleting extra trees/routines on source" + end + + + # Import v6 workflows as these are not the same as Trees and Routines + $logger.info "Importing workflows" + $space_sdk.import_workflows(vars["core"]["space_slug"]) # ------------------------------------------------------------------------------ - # delete space teams - # TODO: A method doesn't exist for deleting the team + # complete # ------------------------------------------------------------------------------ - destinationTeamsArray.each { |team| - #if !SourceTeamArray.include?(team) - if SourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? - #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. - #space_sdk.delete_team(team['slug']) - end - } + $logger.info "Finished importing the \"#{template_name}\" forms." + + $pool.shutdown + $pool.wait_for_termination + end -# ------------------------------------------------------------------------------ -# import kapp data -# ------------------------------------------------------------------------------ - -kapps_array = [] -Dir["#{core_path}/space/kapps/*"].each { |file| - kapp_slug = file.split(File::SEPARATOR).map {|x| x=="" ? 
File::SEPARATOR : x}.last.gsub('.json','') - next if kapps_array.include?(kapp_slug) # If the loop has already iterated over the kapp from the kapp file or the kapp dir skip the iteration - kapps_array.push(kapp_slug) # Append the kapp_slug to an array so a duplicate iteration doesn't occur - kapp = {} - kapp['slug'] = kapp_slug # set kapp_slug - - if File.file?(file) or ( File.directory?(file) and File.file?(file = "#{file}.json") ) # If the file is a file or a dir with a corresponding json file - kapp = JSON.parse( File.read(file) ) - kappExists = space_sdk.find_kapp(kapp['slug']).code.to_i == 200 - if kappExists - space_sdk.update_kapp(kapp['slug'], kapp) - else - space_sdk.add_kapp(kapp['name'], kapp['slug'], kapp) - end - end + + + + + + ################################################################################ + # Import Methods + ################################################################################ # ------------------------------------------------------------------------------ - # Migrate Kapp Attribute Definitions + # Update Space Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") - sourceKappAttributeArray = [] - destinationKappAttributeArray = (space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappAttributeDefinitions = JSON.parse(File.read(file)) - (kappAttributeDefinitions || []).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceKappAttributeArray.push(attribute['name']) - } - # 
------------------------------------------------------------------------------ - # Delete Kapp Attribute Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) - space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) + + def update_space_attributes(core_path) + sourceSpaceAttributeArray = [] + destinationSpaceAttributeArray = ($space_sdk.find_space_attribute_definitions().content['spaceAttributeDefinitions']|| {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/spaceAttributeDefinitions.json") + spaceAttributeDefinitions = JSON.parse(File.read(file)) + + spaceAttributeDefinitions.each { |attribute| + if destinationSpaceAttributeArray.include?(attribute['name']) + $space_sdk.update_space_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_space_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceSpaceAttributeArray.push(attribute['name']) + } + end + destinationSpaceAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceSpaceAttributeArray.include?(attribute) + $space_sdk.delete_space_attribute_definition(attribute) end } end + + + # ------------------------------------------------------------------------------ - # Migrate Kapp Category Definitions + # Update User Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") - sourceKappCategoryArray = [] - destinationKappAttributeArray = (space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} - kappCategoryDefinitions = JSON.parse(File.read(file)) - (kappCategoryDefinitions || 
[]).each { |attribute| - if destinationKappAttributeArray.include?(attribute['name']) - space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceKappCategoryArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Category Definitions - # ------------------------------------------------------------------------------ - destinationKappAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceKappCategoryArray.include?(attribute) - space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) + def update_user_attributes( core_path) + sourceUserAttributeArray = [] + destinationUserAttributeArray = ($space_sdk.find_user_attribute_definitions().content['userAttributeDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/userAttributeDefinitions.json") + userAttributeDefinitions = JSON.parse(File.read(file)) + userAttributeDefinitions.each { |attribute| + if destinationUserAttributeArray.include?(attribute['name']) + $space_sdk.update_user_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_user_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserAttributeArray.push(attribute['name']) + } + end + + destinationUserAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserAttributeArray.include?(attribute) + $space_sdk.delete_user_attribute_definition(attribute) end } end - + # ------------------------------------------------------------------------------ - # Migrate Kapp Form Attribute Definitions + # Update User Profile Attributes # ------------------------------------------------------------------------------ - if 
File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") - sourceFormAttributeArray = [] - destinationFormAttributeArray = (space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} - formAttributeDefinitions = JSON.parse(File.read(file)) - (formAttributeDefinitions || []).each { |attribute| - if destinationFormAttributeArray.include?(attribute['name']) - space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) - end - sourceFormAttributeArray.push(attribute['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Attribute Definitions - # ------------------------------------------------------------------------------ - destinationFormAttributeArray.each { | attribute | - if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) - space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) + def update_user_profile_attributes(core_path) + sourceUserProfileAttributeArray = [] + destinationUserProfileAttributeArray = ($space_sdk.find_user_profile_attribute_definitions().content['userProfileAttributeDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/userProfileAttributeDefinitions.json") + userProfileAttributeDefinitions = JSON.parse(File.read(file)) + + userProfileAttributeDefinitions.each { |attribute| + if destinationUserProfileAttributeArray.include?(attribute['name']) + $space_sdk.update_user_profile_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_user_profile_attribute_definition(attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceUserProfileAttributeArray.push(attribute['name']) + } + 
end + + destinationUserProfileAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceUserProfileAttributeArray.include?(attribute) + $space_sdk.delete_user_profile_attribute_definition(attribute) end } end - + + + # ------------------------------------------------------------------------------ - # Migrate Kapp Form Type Definitions + # Update Team Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") - sourceFormTypesArray = [] - destinationFormTypesArray = (space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} - formTypes = JSON.parse(File.read(file)) - (formTypes || []).each { |body| - if destinationFormTypesArray.include?(body['name']) - space_sdk.update_formtype(kapp['slug'], body['name'], body) - else - space_sdk.add_formtype(kapp['slug'], body) - end - sourceFormTypesArray.push(body['name']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Form Type Definitions - # ------------------------------------------------------------------------------ - destinationFormTypesArray.each { | name | - if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) - space_sdk.delete_formtype(kapp['slug'],name) + def update_team_attributes( core_path) + sourceTeamAttributeArray = [] + destinationTeamAttributeArray = ($space_sdk.find_team_attribute_definitions().content['teamAttributeDefinitions']|| {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/teamAttributeDefinitions.json") + teamAttributeDefinitions = JSON.parse(File.read(file)) + teamAttributeDefinitions.each { |attribute| + if destinationTeamAttributeArray.include?(attribute['name']) + $space_sdk.update_team_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_team_attribute_definition(attribute['name'], 
attribute['description'], attribute['allowsMultiple']) + end + sourceTeamAttributeArray.push(attribute['name']) + } + end + + destinationTeamAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceTeamAttributeArray.include?(attribute) + $space_sdk.delete_team_attribute_definition(attribute) end } end + # ------------------------------------------------------------------------------ - # Migrate Kapp Security Policy Definitions + # Update Datastore Attributes # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") - sourceSecurtyPolicyArray = [] - destinationSecurtyPolicyArray = (space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} - securityPolicyDefinitions = JSON.parse(File.read(file)) - (securityPolicyDefinitions || []).each { |attribute| - if destinationSecurtyPolicyArray.include?(attribute['name']) - space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) - else - space_sdk.add_security_policy_definition(kapp['slug'], attribute) - end - sourceSecurtyPolicyArray.push(attribute['name']) - } + def update_datastore_attributes( core_path) + sourceDatastoreAttributeArray = [] + destinationDatastoreAttributeArray =($space_sdk.find_datastore_form_attribute_definitions().content['datastoreFormAttributeDefinitions'] || {}).map { |definition| definition['name']} + + if File.file?(file = "#{core_path}/space/datastoreFormAttributeDefinitions.json") + datastoreFormAttributeDefinitions = JSON.parse(File.read(file)) + datastoreFormAttributeDefinitions.each { |attribute| + if destinationDatastoreAttributeArray.include?(attribute['name']) + $space_sdk.update_datastore_form_attribute_definition(attribute['name'], attribute) + else + $space_sdk.add_datastore_form_attribute_definition(attribute['name'], 
attribute['description'], attribute['allowsMultiple']) + end + sourceDatastoreAttributeArray.push(attribute['name']) + } + end - destinationSecurtyPolicyArray.each { | attribute | - if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) - space_sdk.delete_security_policy_definition(kapp['slug'],attribute) + destinationDatastoreAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceDatastoreAttributeArray.include?(attribute) + #Delete form is disabled + #$space_sdk.delete_datastore_form_attribute_definition(attribute) end } end - + + # ------------------------------------------------------------------------------ - # Migrate Kapp Categories + # Update Security Policy # ------------------------------------------------------------------------------ - if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") - sourceCategoryArray = [] - destinationCategoryArray = (space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| definition['slug']} - categories = JSON.parse(File.read(file)) - (categories || []).each { |attribute| - if destinationCategoryArray.include?(attribute['slug']) - space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) - else - space_sdk.add_category_on_kapp(kapp['slug'], attribute) - end - sourceCategoryArray.push(attribute['slug']) - } - # ------------------------------------------------------------------------------ - # Delete Kapp Categories - # ------------------------------------------------------------------------------ - - destinationCategoryArray.each { | attribute | - if vars["options"]["delete"] && !sourceCategoryArray.include?(attribute) - space_sdk.delete_category_on_kapp(kapp['slug'],attribute) + def update_security_policy( core_path) + sourceSecurityPolicyArray = [] + destinationSecurityPolicyArray = ($space_sdk.find_space_security_policy_definitions().content['securityPolicyDefinitions'] || {}).map { |definition| 
definition['name']} + + if File.file?(file = "#{core_path}/space/securityPolicyDefinitions.json") + securityPolicyDefinitions = JSON.parse(File.read(file)) + securityPolicyDefinitions.each { |attribute| + if destinationSecurityPolicyArray.include?(attribute['name']) + $space_sdk.update_space_security_policy_definition(attribute['name'], attribute) + else + $space_sdk.add_space_security_policy_definition(attribute) + end + sourceSecurityPolicyArray.push(attribute['name']) + } + end + + destinationSecurityPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurityPolicyArray.include?(attribute) + $space_sdk.delete_space_security_policy_definition(attribute) end } end # ------------------------------------------------------------------------------ - # import space webhooks + # Delete Space Web APIs + # Delete any Web APIs from the destination which are missing from the import data # ------------------------------------------------------------------------------ - sourceSpaceWebhooksArray = [] - destinationSpaceWebhooksArray = (space_sdk.find_webhooks_on_space().content['webhooks'] || {}).map{ |webhook| webhook['name']} - - Dir["#{core_path}/space/webhooks/*.json"].each{ |file| - webhook = JSON.parse(File.read(file)) - if destinationSpaceWebhooksArray.include?(webhook['name']) - space_sdk.update_webhook_on_space(webhook['name'], webhook) - elsif - space_sdk.add_webhook_on_space(webhook) - end - sourceSpaceWebhooksArray.push(webhook['name']) - } + def delete_space_web_apis( core_path) + destinationSpaceWebApisArray.each { | webApi | + if vars["options"]["delete"] && !sourceSpaceWebApisArray.include?(webApi) + $space_sdk.delete_space_webapi(webApi) + end + } + end + # ------------------------------------------------------------------------------ - # delete space webhooks - # TODO: A method doesn't exist for deleting the webhook + # import datastore forms # ------------------------------------------------------------------------------ - 
destinationSpaceWebhooksArray.each do |webhook| - if vars["options"]["delete"] && !sourceSpaceWebhooksArray.include?(webhook) - space_sdk.delete_webhook_on_space(webhook) - end - end + def import_datastore_forms( core_path) + $logger.info "Importing datastore forms for #{vars["core"]["space_slug"]}" + #TODO - Suffers from 1000 query limit + destinationDatastoreForms = [] #From destination server + sourceDatastoreForms = [] #From import data + destinationDatastoreForms = ($space_sdk.find_datastore_forms().content['forms'] || {}).map{ |datastore| datastore['slug']} + Dir["#{core_path}/space/datastore/forms/*.json"].each { |datastore| + body = JSON.parse(File.read(datastore)) + sourceDatastoreForms.push(body['slug']) + if destinationDatastoreForms.include?(body['slug']) + $space_sdk.update_datastore_form(body['slug'], body) + else + $space_sdk.add_datastore_form(body) + end + } + end + # ------------------------------------------------------------------------------ - # Migrate Kapp Webhooks + # delete datastore forms + # Delete any form from the destination which are missing from the import data # ------------------------------------------------------------------------------ - sourceWebhookArray = [] - webhooks_on_kapp = space_sdk.find_webhooks_on_kapp(kapp['slug']) - - if webhooks_on_kapp.code=="200" - destinationWebhookArray = (webhooks_on_kapp.content['webhooks'] || {}).map { |definition| definition['name']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webhooks/*.json"].each{ |webhookFile| - webhookDef = JSON.parse(File.read(webhookFile)) - if destinationWebhookArray.include?(webhookDef['name']) - space_sdk.update_webhook_on_kapp(kapp['slug'], webhookDef['name'], webhookDef) - else - space_sdk.add_webhook_on_kapp(kapp['slug'], webhookDef) - end - sourceWebhookArray.push(webhookDef['name']) - } - - # ------------------------------------------------------------------------------ - # Delete Kapp Webhooks - # 
------------------------------------------------------------------------------ - destinationWebhookArray.each { | attribute | - if vars["options"]["delete"] && !sourceWebhookArray.include?(attribute) - space_sdk.delete_webhook_on_kapp(kapp['slug'],attribute) + def delete_datastore_forms(core_path) + destinationDatastoreForms.each { |datastore_slug| + if vars["options"]["delete"] && !sourceDatastoreForms.include?(datastore_slug) + $space_sdk.delete_datastore_form(datastore_slug) end } - end + end + # ------------------------------------------------------------------------------ - # Add Kapp Forms + # Import Datastore Data # ------------------------------------------------------------------------------ - - if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 - sourceForms = [] #From import data - destinationForms = (space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} - forms.each { |form| - properties = File.read(form) - form = JSON.parse(properties) - sourceForms.push(form['slug']) - if destinationForms.include?(form['slug']) - space_sdk.update_form(kapp['slug'] ,form['slug'], form) - else - space_sdk.add_form(kapp['slug'], form) - end + + def import_datastore_data( core_path) + Dir["#{core_path}/space/datastore/forms/**/submissions*.ndjson"].sort.each { |filename| + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + ($space_sdk.find_all_form_datastore_submissions(form_slug).content['submissions'] || []).each { |submission| + $space_sdk.delete_datastore_submission(submission['id']) + } + File.readlines(filename).each { |line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + $space_sdk.add_datastore_submission(form_slug, body).content + } } - # ------------------------------------------------------------------------------ - # delete forms - # ------------------------------------------------------------------------------ - destinationForms.each { |slug| - if vars["options"]["delete"] && !sourceForms.include?(slug) - #Delete form is disabled - #space_sdk.delete_form(kapp['slug'], slug) - end - } end - + # ------------------------------------------------------------------------------ - # Import Kapp Form Data + # import space teams # ------------------------------------------------------------------------------ - Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| - dir = File.dirname(filename) - form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] - - # This code could delete all submissions form the form before importing new data - # It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. - #(space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| - # space_sdk.delete_submission(submission['id']) - #} + + def import_space_teams( core_path) - File.readlines(filename).each { |line| - submission = JSON.parse(line) - submission["values"].map { |field, value| - # if the value contains an array of files - if value.is_a?(Array) && !value.empty? 
&& value.first.is_a?(Hash) && value.first.has_key?('path') - value.map.with_index { |file, index| - # add 'path' key to the attribute value indicating the location of the attachment - file['path'] = "#{dir}#{file['path']}" - } + if (teams = Dir["#{core_path}/space/teams/*.json"]).length > 0 + sourceTeamArray = [] + destinationTeamsArray = ($space_sdk.find_teams({"include"=>"details"}).content['teams'] || {}).map{ |team| {"slug" => team['slug'], "name"=>team['name'], "updatedAt"=>team['updatedAt']} } + teams.each{ |team| + body = JSON.parse(File.read(team)) + destinationTeam = destinationTeamsArray.find {|destination_team| destination_team['slug'] == body['slug']} + if !destination_team.nil? + #If no updates, skip + if destination_team['updatedAt'] != team['updatedAt'] + $space_sdk.update_team(body['slug'], body) + else + end + + else + $space_sdk.add_team(body) + end + #Add Attributes to the Team + (body['attributes'] || []).each{ | attribute | + $space_sdk.add_team_attribute(body['name'], attribute['name'], attribute['values']) + } + sourceTeamArray.push({'name' => body['name'], 'slug'=>body['slug']} ) } - body = { - "values" => submission["values"], - "coreState" => submission["coreState"] + destinationTeamsArray.each { |team| + #if !SourceTeamArray.include?(team) + if sourceTeamArray.find {|source_team| source_team['slug'] == team['slug'] }.nil? + #Delete has been disabled. It is potentially too dangerous to include w/o advanced knowledge. 
+ #$space_sdk.delete_team(team['slug']) + end } - space_sdk.add_submission(kapp['slug'], form_slug, body).content - } - } - # ------------------------------------------------------------------------------ - # Add Kapp Web APIs - # ------------------------------------------------------------------------------ - sourceWebApisArray = [] - destinationWebApisArray = (space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} - Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| - body = JSON.parse(File.read(webApi)) - if destinationWebApisArray.include?(body['slug']) - space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) - else - space_sdk.add_kapp_webapi(kapp['slug'], body) end - sourceWebApisArray.push(body['slug']) - } + end + # ------------------------------------------------------------------------------ - # Delete Kapp Web APIs + # Import Kapp Categories # ------------------------------------------------------------------------------ - destinationWebApisArray.each { | webApi | - if vars["options"]["delete"] && !sourceWebApisArray.include?(webApi) - space_sdk.delete_kapp_webapi(kapp['slug'], webApi) + def import_kapp_categories(core_path) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categories.json") + sourceCategoryArray = [] + destinationCategoryArray = ($space_sdk.find_categories(kapp['slug']).content['categories'] || {}).map { |definition| definition['slug']} + categories = JSON.parse(File.read(file)) + (categories || []).each { |attribute| + if destinationCategoryArray.include?(attribute['slug']) + $space_sdk.update_category_on_kapp(kapp['slug'], attribute['slug'], attribute) + else + $space_sdk.add_category_on_kapp(kapp['slug'], attribute) + end + sourceCategoryArray.push(attribute['slug']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Categories + # 
------------------------------------------------------------------------------ + + destinationCategoryArray.each { | attribute | + if vars["options"]["delete"] && !sourceCategoryArray.include?(attribute) + $space_sdk.delete_category_on_kapp(kapp['slug'],attribute) + end + } end - } -} - -# ------------------------------------------------------------------------------ -# task -# ------------------------------------------------------------------------------ - -task_sdk = KineticSdk::Task.new({ - app_server_url: "#{vars["task"]["server_url"]}", - username: vars["task"]["service_user_username"], - password: vars["task"]["service_user_password"], - options: http_options.merge({ export_directory: "#{task_path}" }) -}) + end + ################################################################################ + # Helpers + ################################################################################ -# ------------------------------------------------------------------------------ -# task import -# ------------------------------------------------------------------------------ + #Configuration Selection + def config_selection(config_folder_path) -logger.info "Importing the task components for the \"#{template_name}\" template." -logger.info " importing with api: #{task_sdk.api_url}" + #Ensure config folder exists + if !File.directory?(config_folder_path) + $logger.info "Config folder not found at #{config_folder_path}" + puts "Cannot find config folder!" + puts "Exiting..." 
+ gets + exit + end -# ------------------------------------------------------------------------------ -# task handlers -# ------------------------------------------------------------------------------ + # #Determine Config file to use + config_exts = ['.yaml','.yml'] + configArray = [] + $logger.info "Checking #{config_folder_path} for config files" + #Check config folder for yaml/yml files containing the word 'import' + begin + Find.find("#{config_folder_path}/") do |file| + configArray.append(File.basename(file)) if config_exts.include?(File.extname(file)) && (File.basename(file).include?('import')) + end + rescue error + #No config files found in config folder + $logger.error "Error finding default config file path!" + $logger.error "Error reported: #{error}" + puts "Cannot find config files in default path! (#{pwd})" + puts "Exiting script..." + gets + exit + end + $logger.info "Found config files" -# import handlers forcing overwrite -task_sdk.import_handlers(true) + #Print config file options with number indicators to select + puts "Select your config file" + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + $logger.info "Select section" + begin + print "Selection (0 to repeat options): " + sel = gets.chomp.to_i + begin + if sel === 0 + configArray.each_with_index do |cFile, index| + puts "#{index+1}) #{cFile}" + end + next + end + configFile = configArray[sel-1] + $logger.info "Option #{sel} - #{configFile}" + break + rescue + $logger.info "Error selecting config file! Exiting..." + puts "Error selecting config file!" + puts "Exiting..." + gets + exit + end + end while true + return configFile + end + + #Check if nil/unencoded and update accordingly +def SecurePWD(file,vars,pwdAttribute) + #If no pwd, then ask for one, otherwise take current string that was not found to be B64 and convert + if [pwdAttribute]["service_user_password"].nil? 
+ password = IO::console.getpass "Enter Password(#{pwdAttribute}): " + else + password = vars[pwdAttribute]["service_user_password"] + end + enc = Base64.strict_encode64(password) + vars[pwdAttribute]["service_user_password"] = enc.to_s + begin + fileObj = File.open(file, 'w') + puts "Updated pwd in #{pwdAttribute} to #{enc}" + fileObj.write vars.to_yaml + #{ |f| f.write vars.to_yaml } + rescue ArgumentError + $logger.error("There was an error while updating variables file:") + $logger.error(ArgumentError) + ensure + fileObj.close + end +end -# ------------------------------------------------------------------------------ -# Import Task Trees and Routines -# ------------------------------------------------------------------------------ +#Decode password to utilize +def DecodePWD(file, vars, pwdLoc) + pwdAttribute = vars[pwdLoc]["service_user_password"] + return Base64.decode64(pwdAttribute) +end -# import routines and force overwrite -task_sdk.import_routines(true) -# import trees and force overwrite -task_sdk.import_trees(true) +#Confirm passwords exist and are in a proper format, call SecurePWD for any exceptions +def ValidatePWD(file, vars) + $pwdFields.each do |field| + t = vars[field]["service_user_password"] + #See if not a string, not encoded, or default + if !t.is_a?(String) || Base64.strict_encode64(Base64.decode64(t)) != t || t === "" + puts "Updating password #{t}" + SecurePWD(file, vars, field) + end + end +end +def convert_json_to_csv(json_file) + csv_file = json_file.gsub("ndjson","csv") + CSV.open(csv_file, 'w') do |csv| + File.foreach(json_file).with_index do |line, index| + record = JSON.parse(line) + + # Write header on first row + csv << record.keys if index == 0 + + # Write values + csv << record.values + end + end + end + def compare_forms(kapp_slug, old_form) + end -# ------------------------------------------------------------------------------ -# import task categories -# 
------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ + # Migrate Kapp Form Attribute Definitions + # ------------------------------------------------------------------------------ + def import_kapp_form_attribute_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formAttributeDefinitions.json") + sourceFormAttributeArray = [] + destinationFormAttributeArray = ($space_sdk.find_form_attribute_definitions(kapp['slug']).content['formAttributeDefinitions'] || {}).map { |definition| definition['name']} + formAttributeDefinitions = JSON.parse(File.read(file)) + (formAttributeDefinitions || []).each { |attribute| + if destinationFormAttributeArray.include?(attribute['name']) + $space_sdk.update_form_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_form_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceFormAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Attribute Definitions + # ------------------------------------------------------------------------------ + destinationFormAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceFormAttributeArray.include?(attribute) + $space_sdk.delete_form_attribute_definition(kapp['slug'],attribute) + end + } + end + end -sourceCategories = [] #From import data -destinationCategories = (task_sdk.find_categories().content['categories'] || {}).map{ |category| category['name']} + # ------------------------------------------------------------------------------ + # import bridge models + # *NOTE* - This if the bridge doesn't exist the model will be imported w/ an empty "Bridge Slug" value. 
+ # ------------------------------------------------------------------------------ + def import_bridge_models(core_path,vars) + destinationModels = $space_sdk.find_bridge_models() + destinationModels_Array = (destinationModels.content['models'] || {}).map{ |model| model['name']} + + Dir["#{core_path}/space/models/*.json"].each{ |model| + body = JSON.parse(File.read(model)) + if destinationModels_Array.include?(body['name']) + $space_sdk.update_bridge_model(body['name'], body) + elsif + $space_sdk.add_bridge_model(body) + end + } + sourceModelsArray = Dir["#{core_path}/space/models/*.json"].map{ |model| JSON.parse(File.read(model))['name'] } -Dir["#{task_path}/categories/*.json"].each { |file| - category = JSON.parse(File.read(file)) - sourceCategories.push(category['name']) - if destinationCategories.include?(category['name']) - task_sdk.update_category(category['name'], category) - else - task_sdk.add_category(category) + destinationModels_Array.each do |model| + if vars["options"]["delete"] && !sourceModelsArray.include?(model) + $space_sdk.delete_bridge_model(model) + end + end end -} -# ------------------------------------------------------------------------------ -# delete task categories -# ------------------------------------------------------------------------------ + def import_kapp_form_data(core_path,kapp) + + promises = [] + Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/**/submissions*.ndjson"].sort.each { |filename| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + dir = File.dirname(filename) + form_slug = filename.match(/forms\/(.+)\/submissions\.ndjson/)[1] + + #TODO - Get path to ndjson + #Convert to csv + #Import CSV + convert_json_to_csv(filename) + $space_sdk.import_submissions_csv(kapp['slug'],form_slug,body).content + + #How much of the code below do I need to integrate with above? 
+ + ## This code could delete all submissions from the form before importing new data + ## It is commented out because it could be dangerous to have in place and the delete_submission method doesn't exist currently. + #($space_sdk.find_all_form_submissions(kapp['slug'], form_slug).content['submissions'] || []).each { |submission| + # $space_sdk.delete_submission(submission['id']) + #} + + File.readlines(filename).each { |line| + submission = JSON.parse(line) + submission["values"].map { |field, value| + # if the value contains an array of files + if value.is_a?(Array) && !value.empty? && value.first.is_a?(Hash) && value.first.has_key?('path') + value.map.with_index { |file, index| + # add 'path' key to the attribute value indicating the location of the attachment + file['path'] = "#{dir}#{file['path']}" + } + end + } + body = { + "values" => submission["values"], + "coreState" => submission["coreState"] + } + $space_sdk.add_submission(kapp['slug'], form_slug, body).content + } + rescue => e + $mutex.synchronize do + $logger.error("Failed to import form data from : #{e.message}") + $logger.error(e.backtrace.join("\n")) + end + raise + end + end + } + promises.each(&:wait!) 
-destinationCategories.each { |category| - if vars["options"]["delete"] && !sourceCategories.include?(category) - task_sdk.delete_category(category) - end -} + $mutex.synchronize { $logger.info("Finished importing form data for kapp #{kapp['slug']}") } -# ------------------------------------------------------------------------------ -# import task policy rules -# ------------------------------------------------------------------------------ + end -destinationPolicyRuleArray = task_sdk.find_policy_rules().content['policyRules'] -sourcePolicyRuleArray = Dir["#{task_path}/policyRules/*.json"].map{ |file| - rule = JSON.parse(File.read(file)) - {"name" => rule['name'], "type" => rule['type']} - } + # ------------------------------------------------------------------------------ + # Import Space Web APIs + # ------------------------------------------------------------------------------ -Dir["#{task_path}/policyRules/*.json"].each { |file| - rule = JSON.parse(File.read(file)) - if !destinationPolicyRuleArray.find {|dest_rule| dest_rule['name']==rule['name'] && dest_rule['type']==rule['type'] }.nil? - task_sdk.update_policy_rule(rule.slice('type', 'name'), rule) - else - task_sdk.add_policy_rule(rule) + def import_space_web_apis(core_path) + sourceSpaceWebApisArray = [] + destinationSpaceWebApisArray = ($space_sdk.find_space_webapis().content['webApis'] || {}).map { |definition| definition['slug']} + promises = [] + Dir["#{core_path}/space/webApis/*"].each{ |file| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + body = JSON.parse(File.read(file)) + if destinationSpaceWebApisArray.include?(body['slug']) + $space_sdk.update_space_webapi(body['slug'], body) + else + $space_sdk.add_space_webapi(body) + end + sourceSpaceWebApisArray.push(body['slug']) + rescue + end + end + } + promises.each(&:wait!) 
end -} - -# ------------------------------------------------------------------------------ -# delete task policy rules -# ------------------------------------------------------------------------------ -destinationPolicyRuleArray.each { |rule| - if vars["options"]["delete"] && sourcePolicyRuleArray.find {|source_rule| source_rule['name']==rule['name'] && source_rule['type']==rule['type'] }.nil? - task_sdk.delete_policy_rule(rule) + # ------------------------------------------------------------------------------ + # Migrate Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + def import_kapp_attribute_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/kappAttributeDefinitions.json") + sourceKappAttributeArray = [] + destinationKappAttributeArray = ($space_sdk.find_kapp_attribute_definitions(kapp['slug']).content['kappAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappAttributeDefinitions = JSON.parse(File.read(file)) + (kappAttributeDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + $space_sdk.update_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_kapp_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceKappAttributeArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Attribute Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceKappAttributeArray.include?(attribute) + $space_sdk.delete_kapp_attribute_definition(kapp['slug'],attribute) + end + } + end end -} - -# ------------------------------------------------------------------------------ -# Delete Trees and 
Routines not in the Source Data -# ------------------------------------------------------------------------------ - -# identify Trees and Routines on destination -destinationtrees = [] -trees = task_sdk.find_trees().content -(trees['trees'] || []).each { |tree| - destinationtrees.push( tree['title'] ) -} - -# identify Routines in source data -begin - sourceTrees = [] - Dir["#{task_path}/routines/*.xml"].each {|routine| - doc = REXML::Document.new(File.new(routine)) - root = doc.root - sourceTrees.push("#{root.elements["taskTree/name"].text}") - } -rescue - logger.error "Error while identifying routines" -end + # ------------------------------------------------------------------------------ + # Import Kapp Forms + # ------------------------------------------------------------------------------ + def import_forms(core_path,kapp, vars) + if (forms = Dir["#{core_path}/space/kapps/#{kapp['slug']}/forms/*.json"]).length > 0 + sourceForms = [] #From import data + #destinationForms = ($space_sdk.find_forms(kapp['slug']).content['forms'] || {}).map{ |form| form['slug']} + destinationForms = ($space_sdk.find_forms(kapp['slug'],{'export'=>'true'}).content['forms'] || {}) + $logger.info ("Iterating kapp forms") + promises = [] + + + forms.each do |form| + promises << Concurrent::Promise.execute(executor: $pool) do + begin + properties = File.read(form) + form = JSON.parse(properties) + $mutex.synchronize do + $logger.info "Currently #{form['slug']}" + sourceForms.push(form['slug']) + end + + prev_form = (destinationForms.find { |f| f["slug"] == form['slug'] }) + if !prev_form.nil? 
+ #Compare old and new forms + #$space_sdk.compare_forms(destinationForms["#{form['slug']}"], form ) + #Check last updated date/time and compare + $mutex.synchronize { $logger.info("Comparing previous and current form exports for #{form['slug']}") } + match = (form == prev_form) + #Skip if forms match + if !match + $mutex.synchronize { $logger.info("Updating form #{form['slug']}") } + $space_sdk.update_form(kapp['slug'] ,form['slug'], form) + else + $mutex.synchronize { $logger.info("Form #{form['slug']} updatedAt values match, skipping...") } + end + else + $mutex.synchronize { $logger.info("Adding new form #{form['slug']}") } + $space_sdk.add_form(kapp['slug'], form) + end + rescue => e + $mutex.synchronize do + $logger.error("Failed to import form from #{form_file}: #{e.message}") + $logger.error(e.backtrace.join("\n")) + end + raise + end + end + end -begin - # identify trees in source data - Dir["#{task_path}/sources/*"].each {|source| - if File.directory? source - Dir["#{source}/trees/*.xml"].each { |tree| - doc = REXML::Document.new(File.new(tree)) - root = doc.root - tree = "#{root.elements["sourceName"].text} :: #{root.elements["sourceGroup"].text} :: #{root.elements["taskTree/name"].text}" - sourceTrees.push(tree) + promises.each(&:wait!) 
+ + $mutex.synchronize { $logger.info("Finished importing #{sourceForms.size} forms for kapp #{kapp['slug']}") } + + # ------------------------------------------------------------------------------ + # delete forms + # ------------------------------------------------------------------------------ + destinationForms.each { |slug| + if vars["options"]["delete"] && !sourceForms.include?(slug) + #Delete form is disabled + #$space_sdk.delete_form(kapp['slug'], slug) + end + } + end + end + # ------------------------------------------------------------------------------ + # Migrate Kapp Category Definitions + # ------------------------------------------------------------------------------ + def import_kapp_category_definitions(core_path,kapp) + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/categoryAttributeDefinitions.json") + sourceKappCategoryArray = [] + destinationKappAttributeArray = ($space_sdk.find_category_attribute_definitions(kapp['slug']).content['categoryAttributeDefinitions'] || {}).map { |definition| definition['name']} + kappCategoryDefinitions = JSON.parse(File.read(file)) + (kappCategoryDefinitions || []).each { |attribute| + if destinationKappAttributeArray.include?(attribute['name']) + $space_sdk.update_category_attribute_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_category_attribute_definition(kapp['slug'], attribute['name'], attribute['description'], attribute['allowsMultiple']) + end + sourceKappCategoryArray.push(attribute['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Category Definitions + # ------------------------------------------------------------------------------ + destinationKappAttributeArray.each { | attribute | + if vars["options"]["delete"] && !sourceKappCategoryArray.include?(attribute) + $space_sdk.delete_category_attribute_definition(kapp['slug'],attribute) + end } end - } -rescue - logger.error "Error identifying 
trees" -end + end -begin - # Delete the extra tress and routines on the source - destinationtrees.each { | tree | - if vars["options"]["delete"] && !sourceTrees.include?(tree) - treeDef = tree.split(' :: ') - task_sdk.delete_tree( tree ) + def import_kapp_form_type_definitions(core_pathh,kapp) + # ------------------------------------------------------------------------------ + # Migrate Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/formTypes.json") + sourceFormTypesArray = [] + destinationFormTypesArray = ($space_sdk.find_formtypes(kapp['slug']).content['formTypes'] || {}).map { |formTypes| formTypes['name']} + formTypes = JSON.parse(File.read(file)) + (formTypes || []).each { |body| + if destinationFormTypesArray.include?(body['name']) + $space_sdk.update_formtype(kapp['slug'], body['name'], body) + else + $space_sdk.add_formtype(kapp['slug'], body) + end + sourceFormTypesArray.push(body['name']) + } + # ------------------------------------------------------------------------------ + # Delete Kapp Form Type Definitions + # ------------------------------------------------------------------------------ + destinationFormTypesArray.each { | name | + if vars["options"]["delete"] && !sourceFormTypesArray.include?(name) + $space_sdk.delete_formtype(kapp['slug'],name) + end + } end - } -rescue - logger.error "Error deleting extra trees/routines on source" -end - + end + def import_kapp_web_apis(core_path,kapp) + # ------------------------------------------------------------------------------ + # Add Kapp Web APIs + # ------------------------------------------------------------------------------ + sourceWebApisArray = [] + destinationWebApisArray = ($space_sdk.find_kapp_webapis(kapp['slug']).content['webApis'] || {}).map { |definition| definition['slug']} + Dir["#{core_path}/space/kapps/#{kapp['slug']}/webApis/*"].each { |webApi| + body = 
JSON.parse(File.read(webApi)) + if destinationWebApisArray.include?(body['slug']) + $space_sdk.update_kapp_webapi(kapp['slug'], body['slug'], body) + else + $space_sdk.add_kapp_webapi(kapp['slug'], body) + end + sourceWebApisArray.push(body['slug']) + } + end -# Import v6 workflows as these are not not the same as Trees and Routines -logger.info "Importing workflows" -space_sdk.import_workflows(vars["core"]["space_slug"]) + def import_kapp_security_policy_definitions(core_path,kapp) + # ------------------------------------------------------------------------------ + # Migrate Kapp Security Policy Definitions + # ------------------------------------------------------------------------------ + if File.file?(file = "#{core_path}/space/kapps/#{kapp['slug']}/securityPolicyDefinitions.json") + sourceSecurtyPolicyArray = [] + destinationSecurtyPolicyArray = ($space_sdk.find_security_policy_definitions(kapp['slug']).content['securityPolicyDefinitions'] || {}).map { |definition| definition['name']} + securityPolicyDefinitions = JSON.parse(File.read(file)) + (securityPolicyDefinitions || []).each { |attribute| + if destinationSecurtyPolicyArray.include?(attribute['name']) + $space_sdk.update_security_policy_definition(kapp['slug'], attribute['name'], attribute) + else + $space_sdk.add_security_policy_definition(kapp['slug'], attribute) + end + sourceSecurtyPolicyArray.push(attribute['name']) + } -# ------------------------------------------------------------------------------ -# complete -# ------------------------------------------------------------------------------ + destinationSecurtyPolicyArray.each { | attribute | + if vars["options"]["delete"] && !sourceSecurtyPolicyArray.include?(attribute) + $space_sdk.delete_security_policy_definition(kapp['slug'],attribute) + end + } + end + end -logger.info "Finished importing the \"#{template_name}\" forms." 
+starting = Process.clock_gettime(Process::CLOCK_MONOTONIC) +import_space() +ending = Process.clock_gettime(Process::CLOCK_MONOTONIC) +elapsed = ending - starting +puts "Time: #{elapsed}" \ No newline at end of file