Katello 3.17 to 3.18, cannot migrate to pulp3

I even tried putting the rake code into a file and ran foreman-rake console < filename

Same output

[root@katello ~]# foreman-rake console < stats.rake
Loading production environment (Rails 6.0.3.4)
Switch to inspect mode.
namespace :katello do
  desc "Retrieve Pulp 2 -> Pulp 3 content migration stats"
  task :pulp3_migration_stats => [:environment] do
    User.current = User.anonymous_admin

    migrated_rpm_count = ::Katello::Rpm.where('pulp_id LIKE ?', '%/pulp/api/v3/content/rpm/packages/%').
      or(::Katello::Rpm.where.not(migrated_pulp3_href: nil)).count
    migrated_erratum_count = ::Katello::RepositoryErratum.where.not(erratum_pulp3_href: nil).count
    migrated_repo_count = ::Katello::Repository.where.not(version_href: nil).count
    migratable_repo_count = ::Katello::Repository.count - ::Katello::Repository.puppet_type.count -
      ::Katello::Repository.ostree_type.count - ::Katello::Repository.deb_type.count

    on_demand_rpm_count = Katello::RepositoryRpm.where(:repository_id => Katello::Repository.yum_type.on_demand).distinct.count
    on_demand_unmigrated_rpm_count = on_demand_rpm_count - migrated_rpm_count
    immediate_unmigrated_rpm_count = ::Katello::Rpm.count - migrated_rpm_count - on_demand_unmigrated_rpm_count

    # On Demand RPMs: (6.46E-04)*(#RPMs) + -3.22
    # Immediate RPMs: (9.39E-04)*(#RPMs) + -3
    # Repositories: 0.0746*(#Repos) + -2.07
    migration_minutes = (0.000646 * on_demand_unmigrated_rpm_count - 3.22 +
                         0.000943 * immediate_unmigrated_rpm_count - 3 +
                         0.0746 * migratable_repo_count).to_i
    hours = (migration_minutes / 60) % 60
    minutes = migration_minutes % 60

    puts "============Migration Summary================"
    puts "Migrated/Total RPMs: #{migrated_rpm_count}/#{::Katello::Rpm.count}"
    puts "Migrated/Total errata: #{migrated_erratum_count}/#{::Katello::RepositoryErratum.count}"
    puts "Migrated/Total repositories: #{migrated_repo_count}/#{migratable_repo_count}"
    puts
    # The timing formulas go negative if the amount of content is negligibly small
    if migration_minutes >= 5
      puts "Estimated migration time based on yum content: #{hours} hours, #{minutes} minutes"
    else
      puts "Estimated migration time based on yum content: fewer than 5 minutes"
    end

    puts
    puts "\e[33mNote:\e[0m ensure there is sufficient storage space for /var/lib/pulp/published to triple in size before starting the migration process."
    puts "Check the size of /var/lib/pulp/published with 'du -sh /var/lib/pulp/published/'"

    displayed_warning = false
    found_missing = false
    path = Dir.mktmpdir('unmigratable_content-')
    Katello::Pulp3::Migration::CORRUPTABLE_CONTENT_TYPES.each do |type|
      if type.missing_migrated_content.any?
        unless displayed_warning
          displayed_warning = true
          puts
          puts "============Missing/Corrupted Content Summary================"
          puts "WARNING: MISSING OR CORRUPTED CONTENT DETECTED"
        end

        found_missing = true
        name = type.name.demodulize
        puts "Corrupted or Missing #{name}: #{type.missing_migrated_content.count}/#{type.count}"

        File.open(File.join(path, name), 'w') do |file|
          text = type.missing_migrated_content.map(&:filename).join("\n") + "\n"
          file.write(text)
        end
      end
    end

    if found_missing
      puts "Corrupted or missing content has been detected, you can examine the list of content in #{path} and take action by either:"
      puts "1. Performing a 'Verify Checksum' sync under Advanced Sync Options, let it complete, and re-running the migration"
      puts "2. Deleting/disabling the affected repositories and running orphan cleanup (foreman-rake katello:delete_orphaned_content) and re-running the migration"
      puts "3. Manually correcting files on the filesystem in /var/lib/pulp/content/ and re-running the migration"
      puts "4. Mark currently corrupted or missing content as skipped (foreman-rake katello:approve_corrupted_migration_content).  This will skip migration of missing or corrupted content."
      puts
    end
  end
end
#<Rake::NameSpace:0x000000000d8cc9b0 @task_manager=#<Rake::Application:0x00000000023ebb90 @tasks={"db:load_config"=><Rake::Task db:load_config => [environment]>, "db:environment:set"=><Rake::Task db:environment:set => [load_config]>, "db:check_protected_environments"=><Rake::Task db:check_protected_environments => [load_config]>, "db:create:all"=><Rake::Task db:create:all => [load_config]>, "db:create"=><Rake::Task db:create => [load_config]>, "db:drop:all"=><Rake::Task db:drop:all => [load_config, check_protected_environments]>, "db:drop"=><Rake::Task db:drop => [load_config, check_protected_environments]>, "db:drop:_unsafe"=><Rake::Task db:drop:_unsafe => [load_config]>, "db:purge:all"=><Rake::Task db:purge:all => [load_config, check_protected_environments]>, "db:truncate_all"=><Rake::Task db:truncate_all => [load_config, check_protected_environments]>, "db:purge"=><Rake::Task db:purge => [load_config, check_protected_environments]>, "db:migrate"=><Rake::Task db:migrate => [load_config, plugin:refresh_migrations]>, "db:_dump"=><Rake::Task db:_dump => []>, "db:migrate:redo"=><Rake::Task db:migrate:redo => [load_config]>, "db:migrate:reset"=><Rake::Task db:migrate:reset => [db:drop, db:create, db:migrate]>, "db:migrate:up"=><Rake::Task db:migrate:up => [load_config]>, "db:migrate:down"=><Rake::Task db:migrate:down => [load_config]>, "db:migrate:status"=><Rake::Task db:migrate:status => [load_config]>, "db:rollback"=><Rake::Task db:rollback => [load_config]>, "db:forward"=><Rake::Task db:forward => [load_config]>, "db:reset"=><Rake::Task db:reset => [db:drop, db:setup]>, "db:charset"=><Rake::Task db:charset => [load_config]>, "db:collation"=><Rake::Task db:collation => [load_config]>, "db:version"=><Rake::Task db:version => [load_config]>, "db:abort_if_pending_migrations"=><Rake::Task db:abort_if_pending_migrations => [load_config]>, "db:setup"=><Rake::Task db:setup => [db:schema:load_if_ruby, db:structure:load_if_sql, seed]>, "db:prepare"=><Rake::Task db:prepare => [load_config]>, "db:seed"=><Rake::Task db:seed => [load_config]>, "db:seed:replant"=><Rake::Task db:seed:replant => [load_config, truncate_all, seed]>, "db:fixtures:load"=><Rake::Task db:fixtures:load => [load_config]>, "db:fixtures:identify"=><Rake::Task db:fixtures:identify => [load_config]>, "db:schema:dump"=><Rake::Task db:schema:dump => [load_config]>, "db:schema:load"=><Rake::Task db:schema:load => [load_config, check_protected_environments]>, "db:schema:load_if_ruby"=><Rake::Task db:schema:load_if_ruby => [db:create, environment]>, "db:schema:cache:dump"=><Rake::Task db:schema:cache:dump => [load_config]>, "db:schema:cache:clear"=><Rake::Task db:schema:cache:clear => [load_config]>, "db:structure:dump"=><Rake::Task db:structure:dump => [load_config]>, "db:structure:load"=><Rake::Task db:structure:load => [load_config, check_protected_environments]>, "db:structure:load_if_sql"=><Rake::Task db:structure:load_if_sql => [db:create, environment]>, "db:test:load"=><Rake::Task db:test:load => [db:test:purge]>, "db:test:load_schema"=><Rake::Task db:test:load_schema => [db:test:purge]>, "db:test:load_structure"=><Rake::Task db:test:load_structure => [db:test:purge]>, "db:test:purge"=><Rake::Task db:test:purge => [load_config, check_protected_environments]>, "db:test:prepare"=><Rake::Task db:test:prepare => [load_config]>, "railties:install:migrations"=><Rake::Task railties:install:migrations => [db:load_config]>, "default"=><Rake::Task default => [test]>, "test"=><Rake::Task test => [test:foreman_remote_execution]>, 
"test:prepare"=><Rake::Task test:prepare => []>, "test:run"=><Rake::Task test:run => [test]>, "test:db"=><Rake::Task test:db => [db:test:prepare, test]>, "test:models"=><Rake::Task test:models => [test:prepare]>, "test:helpers"=><Rake::Task test:helpers => [test:prepare]>, "test:channels"=><Rake::Task test:channels => [test:prepare]>, "test:controllers"=><Rake::Task test:controllers => [test:prepare]>, "test:mailers"=><Rake::Task test:mailers => [test:prepare]>, "test:integration"=><Rake::Task test:integration => [test:prepare]>, "test:jobs"=><Rake::Task test:jobs => [test:prepare]>, "test:mailboxes"=><Rake::Task test:mailboxes => [test:prepare]>, "test:generators"=><Rake::Task test:generators => [test:prepare]>, "test:units"=><Rake::Task test:units => [test:prepare]>, "test:functionals"=><Rake::Task test:functionals => [test:prepare]>, "test:system"=><Rake::Task test:system => [test:prepare]>, "assets:environment"=><Rake::Task assets:environment => []>, "assets:precompile"=><Rake::Task assets:precompile => [environment, yarn:install]>, "assets:clean"=><Rake::Task assets:clean => [environment]>, "assets:clobber"=><Rake::Task assets:clobber => [environment]>, "apipie_dsl:cache"=><Rake::Task apipie_dsl:cache => [environment]>, "apipie_dsl:static"=><Rake::Task apipie_dsl:static => [environment]>, "apipie_dsl:static_json"=><Rake::Task apipie_dsl:static_json => [environment]>, "secure_headers:generate_hashes"=><Rake::Task secure_headers:generate_hashes => []>, "db:sessions:create"=><Rake::Task db:sessions:create => [environment, db:load_config]>, "db:sessions:clear"=><Rake::Task db:sessions:clear => [environment, db:load_config]>, "db:sessions:trim"=><Rake::Task db:sessions:trim => [environment, db:load_config]>, "webpack:compile"=><Rake::Task webpack:compile => [environment]>, "graphql:upgrade"=><Rake::Task graphql:upgrade => []>, "graphql:upgrade:create_base_objects"=><Rake::Task graphql:upgrade:create_base_objects => []>, "graphql:upgrade:schema"=><Rake::Task graphql:upgrade:schema => []>, "graphql:upgrade:member"=><Rake::Task graphql:upgrade:member => []>, "deface:test_selector"=><Rake::Task deface:test_selector => [environment]>, "deface:get_result"=><Rake::Task deface:get_result => [environment]>, "deface:test_all"=><Rake::Task deface:test_all => [environment]>, "deface:failures_by_virtual_path"=><Rake::Task deface:failures_by_virtual_path => [environment]>, "deface:precompile"=><Rake::Task deface:precompile => [environment, clean]>, "deface:clean"=><Rake::Task deface:clean => []>, "cache_digests:nested_dependencies"=><Rake::Task cache_digests:nested_dependencies => [environment]>, "cache_digests:dependencies"=><Rake::Task cache_digests:dependencies => [environment]>, "apipie:static"=><Rake::Task apipie:static => [environment]>, "apipie:static_json"=><Rake::Task apipie:static_json => [environment]>, "apipie:static_swagger_json"=><Rake::Task apipie:static_swagger_json => [environment]>, "apipie:did_swagger_change"=><Rake::Task apipie:did_swagger_change => [environment]>, "apipie:cache"=><Rake::Task apipie:cache => [environment]>, "apipie:client"=><Rake::Task apipie:client => []>, "apipie:update_from_routes"=><Rake::Task apipie:update_from_routes => [environment]>, "apipie:convert_examples"=><Rake::Task apipie:convert_examples => [environment]>, "dynflow:client"=><Rake::Task dynflow:client => [environment]>, "foreman_tasks:export_tasks"=><Rake::Task foreman_tasks:export_tasks => [environment, dynflow:client]>, "foreman_tasks:cleanup:run"=><Rake::Task foreman_tasks:cleanup:run => 
[environment, dynflow:client]>, "foreman_tasks:cleanup:config"=><Rake::Task foreman_tasks:cleanup:config => [environment, dynflow:client]>, "foreman_tasks:cleanup"=><Rake::Task foreman_tasks:cleanup => [cleanup:run]>, "foreman_tasks:generate_task_actions"=><Rake::Task foreman_tasks:generate_task_actions => [environment]>, "foreman_tasks:install:migrations"=><Rake::Task foreman_tasks:install:migrations => []>, "katello:clean_backend_objects"=><Rake::Task katello:clean_backend_objects => [environment, check_ping]>, "katello:clean_old_file_repos"=><Rake::Task katello:clean_old_file_repos => [environment]>, "katello:clean_published_repo_directories"=><Rake::Task katello:clean_published_repo_directories => [environment]>, "katello:delete_orphaned_content"=><Rake::Task katello:delete_orphaned_content => [dynflow:client]>, "katello:import_applicability"=><Rake::Task katello:import_applicability => [environment]>, "katello:import_subscriptions"=><Rake::Task katello:import_subscriptions => [environment]>, "jenkins:katello"=><Rake::Task jenkins:katello => []>, "jenkins:katello:spec"=><Rake::Task jenkins:katello:spec => []>, "jenkins:katello:test"=><Rake::Task jenkins:katello:test => []>, "katello:sync_ansible_job_templates"=><Rake::Task katello:sync_ansible_job_templates => [environment]>, "katello:pulp3_content_switchover"=><Rake::Task katello:pulp3_content_switchover => [dynflow:client]>, "katello:pulp3_migration"=><Rake::Task katello:pulp3_migration => [dynflow:client]>, "katello:pulp3_migration_abort"=><Rake::Task katello:pulp3_migration_abort => [dynflow:client]>, "katello:approve_corrupted_migration_content"=><Rake::Task katello:approve_corrupted_migration_content => [dynflow:client, check_ping]>, "katello:unapprove_corrupted_migration_content"=><Rake::Task katello:unapprove_corrupted_migration_content => [dynflow:client, check_ping]>, "katello:pulp3_migration_reset"=><Rake::Task katello:pulp3_migration_reset => [dynflow:client, check_ping]>, "katello:pulp3_migration_stats"=><Rake::Task katello:pulp3_migration_stats => [environment]>, "katello:pulp3_post_migration_check"=><Rake::Task katello:pulp3_post_migration_check => [dynflow:client]>, "katello:receptor:extract_orgs"=><Rake::Task katello:receptor:extract_orgs => [dynflow:client, check_ping]>, "katello:regenerate_ueber_certs"=><Rake::Task katello:regenerate_ueber_certs => [environment]>, "katello:check_ping"=><Rake::Task katello:check_ping => [environment]>, "katello:reimport"=><Rake::Task katello:reimport => [dynflow:client, katello:check_ping]>, "reports:expire"=><Rake::Task reports:expire => [environment]>, "reports:daily"=><Rake::Task reports:daily => [dynflow:client, environment]>, "reports:weekly"=><Rake::Task reports:weekly => [dynflow:client, environment]>, "reports:monthly"=><Rake::Task reports:monthly => [dynflow:client, environment]>, "katello:publish_unpublished_repositories"=><Rake::Task katello:publish_unpublished_repositories => [dynflow:client, check_ping]>, "katello:regenerate_repo_metadata"=><Rake::Task katello:regenerate_repo_metadata => [dynflow:client, check_ping]>, "katello:refresh_pulp_repo_details"=><Rake::Task katello:refresh_pulp_repo_details => [dynflow:client, check_ping]>, "katello:correct_repositories"=><Rake::Task katello:correct_repositories => [environment, check_ping]>, "katello:correct_puppet_environments"=><Rake::Task katello:correct_puppet_environments => [environment, check_ping]>, "katello:change_download_policy"=><Rake::Task katello:change_download_policy => [environment, check_ping]>, 
"katello:refresh_sync_schedule"=><Rake::Task katello:refresh_sync_schedule => [environment, check_ping]>, "katello:reset_backends:pulp_legacy"=><Rake::Task katello:reset_backends:pulp_legacy => []>, "katello:reset_backends:pulp"=><Rake::Task katello:reset_backends:pulp => []>, "katello:reset_backends:candlepin"=><Rake::Task katello:reset_backends:candlepin => []>, "katello:reset_default_smart_proxy"=><Rake::Task katello:reset_default_smart_proxy => []>, "katello:reset_backends"=><Rake::Task katello:reset_backends => []>, "katello:reset"=><Rake::Task katello:reset => [environment]>, "katello:rubocop"=><Rake::Task katello:rubocop => []>, "katello:rubocop:jenkins"=><Rake::Task katello:rubocop:jenkins => []>, "test:katello:spec"=><Rake::Task test:katello:spec => [db:test:prepare]>, "test:katello:spec:routing"=><Rake::Task test:katello:spec:routing => [db:test:prepare]>, "test:katello:test"=><Rake::Task test:katello:test => [db:test:prepare]>, "test:katello:test:models"=><Rake::Task test:katello:test:models => [db:test:prepare]>, "test:katello:test:actions"=><Rake::Task test:katello:test:actions => [db:test:prepare]>, "test:katello:test:controllers"=><Rake::Task test:katello:test:controllers => [db:test:prepare]>, "test:katello:test:live_scenarios"=><Rake::Task test:katello:test:live_scenarios => [db:test:prepare]>, "test:katello:test:glue"=><Rake::Task test:katello:test:glue => [db:test:prepare]>, "test:katello:test:services"=><Rake::Task test:katello:test:services => [db:test:prepare]>, "test:katello:test:pulpcore"=><Rake::Task test:katello:test:pulpcore => [db:test:prepare]>, "test:katello:test:lib"=><Rake::Task test:katello:test:lib => [db:test:prepare]>, "test:katello"=><Rake::Task test:katello => []>, "katello:unify_hosts"=><Rake::Task katello:unify_hosts => [environment]>, "katello:update_default_http_proxy"=><Rake::Task katello:update_default_http_proxy => [environment]>, "katello:update_subscription_facet_backend_data"=><Rake::Task katello:update_subscription_facet_backend_data => [environment]>, "katello:upgrade_check"=><Rake::Task katello:upgrade_check => [environment]>, "katello:upgrades:3.10:clear_invalid_repo_credentials"=><Rake::Task katello:upgrades:3.10:clear_invalid_repo_credentials => [environment]>, "katello:upgrades:3.10:update_gpg_key_urls"=><Rake::Task katello:upgrades:3.10:update_gpg_key_urls => [environment, katello:check_ping]>, "katello:upgrades:3.11:import_yum_metadata"=><Rake::Task katello:upgrades:3.11:import_yum_metadata => [environment]>, "katello:upgrades:3.11:update_puppet_repos"=><Rake::Task katello:upgrades:3.11:update_puppet_repos => [environment]>, "katello:upgrades:3.12:remove_pulp2_notifier"=><Rake::Task katello:upgrades:3.12:remove_pulp2_notifier => [environment]>, "katello:upgrades:3.13:republish_deb_metadata"=><Rake::Task katello:upgrades:3.13:republish_deb_metadata => [environment]>, "katello:upgrades:3.15:reindex_rpm_modular"=><Rake::Task katello:upgrades:3.15:reindex_rpm_modular => [environment]>, "katello:upgrades:3.15:set_sub_facet_dmi_uuid"=><Rake::Task katello:upgrades:3.15:set_sub_facet_dmi_uuid => [environment]>, "katello:upgrades:3.16:update_applicable_el8_hosts"=><Rake::Task katello:upgrades:3.16:update_applicable_el8_hosts => [environment]>, "katello:upgrades:3.18:add_cvv_export_history_metadata"=><Rake::Task katello:upgrades:3.18:add_cvv_export_history_metadata => [environment, check_ping]>, "katello:upgrades:3.8:clear_checksum_type"=><Rake::Task katello:upgrades:3.8:clear_checksum_type => [environment]>, 
"katello:virt_who_report"=><Rake::Task katello:virt_who_report => [environment, check_ping]>, "katello:install:migrations"=><Rake::Task katello:install:migrations => []>, "foreman_remote_execution:install:migrations"=><Rake::Task foreman_remote_execution:install:migrations => []>, "foreman_remote_execution:example:task"=><Rake::Task foreman_remote_execution:example:task => [environment]>, "test:foreman_remote_execution"=><Rake::Task test:foreman_remote_execution => [db:test:prepare]>, "foreman_remote_execution:rubocop"=><Rake::Task foreman_remote_execution:rubocop => []>, "apipie:cache:index"=><Rake::Task apipie:cache:index => []>, "audits:expire"=><Rake::Task audits:expire => [environment]>, "audits:anonymize"=><Rake::Task audits:anonymize => [environment]>, "auth_source_external:create"=><Rake::Task auth_source_external:create => [environment]>, "db:dump"=><Rake::Task db:dump => [environment]>, "db:import_dump"=><Rake::Task db:import_dump => [environment]>, "bundler:deps"=><Rake::Task bundler:deps => [environment]>, "purge:trends"=><Rake::Task purge:trends => [environment]>, "purge:all"=><Rake::Task purge:all => [purge:trends]>, "purge_data"=><Rake::Task purge_data => [purge:all]>, "config"=><Rake::Task config => [environment]>, "console"=><Rake::Task console => [dynflow:client]>, "db:convert:prod2dev"=><Rake::Task db:convert:prod2dev => [environment]>, "db:sequence:reset"=><Rake::Task db:sequence:reset => [environment]>, "dynflow:executor"=><Rake::Task dynflow:executor => [environment]>, "dynflow:migrate"=><Rake::Task dynflow:migrate => [environment]>, "dynflow:abort_if_pending_migrations"=><Rake::Task dynflow:abort_if_pending_migrations => [environment]>, "security:generate_encryption_key"=><Rake::Task security:generate_encryption_key => []>, "db:encrypt_all"=><Rake::Task db:encrypt_all => []>, "db:decrypt_all"=><Rake::Task db:decrypt_all => []>, "db:auth_sources_ldap:encrypt"=><Rake::Task db:auth_sources_ldap:encrypt => [environment]>, "db:auth_sources_ldap:decrypt"=><Rake::Task db:auth_sources_ldap:decrypt => [environment]>, "db:compute_resources:encrypt"=><Rake::Task db:compute_resources:encrypt => [environment]>, "db:compute_resources:decrypt"=><Rake::Task db:compute_resources:decrypt => [environment]>, "errors:fetch_log"=><Rake::Task errors:fetch_log => [environment]>, "exception:codes"=><Rake::Task exception:codes => [environment]>, "exports:csv"=><Rake::Task exports:csv => [environment]>, "facts:clean"=><Rake::Task facts:clean => [environment]>, "fix_db_cache"=><Rake::Task fix_db_cache => [environment]>, "fix_db_cache:delete_old_cache"=><Rake::Task fix_db_cache:delete_old_cache => []>, "fix_db_cache:create_new_cache"=><Rake::Task fix_db_cache:create_new_cache => []>, "fix_db_cache:cache_filter_searches"=><Rake::Task fix_db_cache:cache_filter_searches => []>, "hosts:scan_out_of_sync"=><Rake::Task hosts:scan_out_of_sync => [environment]>, "interfaces:clean"=><Rake::Task interfaces:clean => [environment]>, "ldap:refresh_usergroups"=><Rake::Task ldap:refresh_usergroups => [environment]>, "ldap:remove_deleted_users"=><Rake::Task ldap:remove_deleted_users => [environment]>, "locale:find_model"=><Rake::Task locale:find_model => [gettext:store_model_attributes]>, "locale:find_code"=><Rake::Task locale:find_code => [gettext:find]>, "locale:find"=><Rake::Task locale:find => [find_model, find_code]>, "locale:po_to_json"=><Rake::Task locale:po_to_json => [locale/ca/foreman.po, locale/cs_CZ/foreman.po, locale/de/foreman.po, locale/en/foreman.po, locale/en_GB/foreman.po, locale/es/foreman.po, 
locale/fr/foreman.po, locale/gl/foreman.po, locale/it/foreman.po, locale/ja/foreman.po, locale/ko/foreman.po, locale/nl_NL/foreman.po, locale/pl/foreman.po, locale/pt_BR/foreman.po, locale/ru/foreman.po, locale/sv_SE/foreman.po, locale/zh_CN/foreman.po, locale/zh_TW/foreman.po, gettext:po_to_json]>, "locale:pack"=><Rake::Task locale:pack => [gettext:pack]>, "log"=><Rake::Task log => [environment]>, "models:consolidate"=><Rake::Task models:consolidate => [environment]>, "notifications:clean"=><Rake::Task notifications:clean => [environment]>, "orchestration:dhcp:add_missing"=><Rake::Task orchestration:dhcp:add_missing => [environment]>, "orchestration:dhcp:remove_offending"=><Rake::Task orchestration:dhcp:remove_offending => [environment]>, "parameters:reset_priorities"=><Rake::Task parameters:reset_priorities => [environment]>, "parameters:cast_key_types_and_values"=><Rake::Task parameters:cast_key_types_and_values => [environment]>, "pkg:deb"=><Rake::Task pkg:deb => []>, "pkg:generate_source"=><Rake::Task pkg:generate_source => []>, "plugin:apipie:cache"=><Rake::Task plugin:apipie:cache => []>, "plugin:assets:precompile"=><Rake::Task plugin:assets:precompile => [environment]>, "plugin:list"=><Rake::Task plugin:list => [environment]>, "plugin:validate_roles"=><Rake::Task plugin:validate_roles => [environment]>, "plugin:refresh_migrations"=><Rake::Task plugin:refresh_migrations => [environment]>, "clean"=><Rake::Task clean => []>, "clobber"=><Rake::Task clobber => [clean]>, "puppet:import:hosts_and_facts"=><Rake::Task puppet:import:hosts_and_facts => [environment]>, "puppet:import:external_nodes"=><Rake::Task puppet:import:external_nodes => [environment]>, "rescan_reports_origins"=><Rake::Task rescan_reports_origins => [environment]>, "permissions:reset"=><Rake::Task permissions:reset => [environment]>, "rss:create_notifications"=><Rake::Task rss:create_notifications => [environment]>, "security:generate_token"=><Rake::Task security:generate_token => []>, "seed:forgeries"=><Rake::Task seed:forgeries => [forgeries:all]>, "seed:forgeries:load_factories"=><Rake::Task seed:forgeries:load_factories => [environment]>, "seed:forgeries:domains"=><Rake::Task seed:forgeries:domains => [load_factories]>, "seed:forgeries:hosts"=><Rake::Task seed:forgeries:hosts => [load_factories]>, "seed:forgeries:operatingsystems"=><Rake::Task seed:forgeries:operatingsystems => [load_factories]>, "seed:forgeries:organizations"=><Rake::Task seed:forgeries:organizations => [load_factories]>, "seed:forgeries:all"=><Rake::Task seed:forgeries:all => [operatingsystems, organizations, domains, hosts]>, "snapshots:generate"=><Rake::Task snapshots:generate => [environment]>, "telemetry:metrics"=><Rake::Task telemetry:metrics => [environment]>, "telemetry:prometheus_statsd"=><Rake::Task telemetry:prometheus_statsd => [environment]>, "templates:render"=><Rake::Task templates:render => [environment]>, "test:api"=><Rake::Task test:api => []>, "test:graphql"=><Rake::Task test:graphql => []>, "trends:counter"=><Rake::Task trends:counter => [environment]>, "trends:reduce"=><Rake::Task trends:reduce => [environment]>, "upgrade:run"=><Rake::Task upgrade:run => [environment]>, "notes"=><Rake::Task notes => [environment]>, "notes:optimize"=><Rake::Task notes:optimize => [environment]>, "notes:fixme"=><Rake::Task notes:fixme => [environment]>, "notes:todo"=><Rake::Task notes:todo => [environment]>, "notes:custom"=><Rake::Task notes:custom => [environment]>, "dev:cache"=><Rake::Task dev:cache => [environment]>, "app:update"=><Rake::Task 
app:update => [update:configs, update:bin, update:active_storage, update:upgrade_guide_info]>, "app:template"=><Rake::Task app:template => [environment]>, "app:templates:copy"=><Rake::Task app:templates:copy => []>, "app:update:configs"=><Rake::Task app:update:configs => []>, "app:update:bin"=><Rake::Task app:update:bin => []>, "app:update:active_storage"=><Rake::Task app:update:active_storage => []>, "app:update:upgrade_guide_info"=><Rake::Task app:update:upgrade_guide_info => []>, "initializers"=><Rake::Task initializers => [environment]>, "log:clear"=><Rake::Task log:clear => []>, "middleware"=><Rake::Task middleware => [environment]>, "secret"=><Rake::Task secret => []>, "about"=><Rake::Task about => [environment]>, "time:zones"=><Rake::Task time:zones => []>, "time:zones:all"=><Rake::Task time:zones:all => []>, "time:zones:us"=><Rake::Task time:zones:us => []>, "time:zones:local"=><Rake::Task time:zones:local => []>, "restart"=><Rake::Task restart => []>, "routes"=><Rake::Task routes => [environment]>, "tmp:clear"=><Rake::Task tmp:clear => [tmp:cache:clear, tmp:sockets:clear, tmp:screenshots:clear]>, "tmp/cache"=><Rake::FileCreationTask tmp/cache => []>, "tmp"=><Rake::FileCreationTask tmp => []>, "tmp/sockets"=><Rake::FileCreationTask tmp/sockets => []>, "tmp/pids"=><Rake::FileCreationTask tmp/pids => []>, "tmp/cache/assets"=><Rake::FileCreationTask tmp/cache/assets => []>, "tmp:create"=><Rake::Task tmp:create => [tmp/cache, tmp/sockets, tmp/pids, tmp/cache/assets]>, "tmp:cache:clear"=><Rake::Task tmp:cache:clear => []>, "tmp:sockets:clear"=><Rake::Task tmp:sockets:clear => []>, "tmp:pids:clear"=><Rake::Task tmp:pids:clear => []>, "tmp:screenshots:clear"=><Rake::Task tmp:screenshots:clear => []>, "yarn:install"=><Rake::Task yarn:install => []>, "zeitwerk:check"=><Rake::Task zeitwerk:check => [environment]>, "stats"=><Rake::Task stats => []>, "environment"=><Rake::Task environment => []>}, @rules=[], @scope=LL(), @last_description="Retrieve Pulp 2 -> Pulp 3 content migration stats", @name="rake", @rakefiles=["rakefile", "Rakefile", "rakefile.rb", "Rakefile.rb"], @rakefile="Rakefile", @pending_imports=[], @imported=[], @loaders={".rb"=>#<Rake::DefaultLoader:0x00000000023eb910>, ".rf"=>#<Rake::DefaultLoader:0x00000000023eb898>, ".rake"=>#<Rake::DefaultLoader:0x00000000023eb7f8>}, @default_loader=#<Rake::DefaultLoader:0x00000000023eba00>, @original_dir="/usr/share/foreman", @top_level_tasks=["console"], @tty_output=true, @terminal_columns=0, @options=#<OpenStruct always_multitask=false, backtrace=false, build_all=false, dryrun=false, ignore_deprecate=false, ignore_system=false, job_stats=false, load_system=false, nosearch=false, rakelib=["rakelib"], show_all_tasks=false, show_prereqs=false, show_task_pattern=nil, show_tasks=nil, silent=false, suppress_backtrace_pattern=nil, thread_pool_size=60, trace=false, trace_output=#<IO:<STDERR>>, trace_rules=false>>, @scope=LL("katello")>

[root@katello ~]#

You are copying the whole file, not just the selected part. cintrix84 selected lines 6-24. That's also what I mentioned above and where my small test above started.

When I click on the link above, those lines are shown with yellow background and I can conveniently copy those lines via the “Copy lines” in the menu with the three dots.

Here's the output of just the yellow highlighted section (and a few more lines). Maybe I misunderstood what you wanted me to run:

irb(main):029:0> migrated_rpm_count = ::Katello::Rpm.where('pulp_id LIKE ?', '%/pulp/api/v3/content/rpm/packages/%').
irb(main):030:0*   or(::Katello::Rpm.where.not(migrated_pulp3_href: nil)).count
=> 0
irb(main):031:0> migrated_erratum_count = ::Katello::RepositoryErratum.where.not(erratum_pulp3_href: nil).count
=> 0
irb(main):032:0> migrated_repo_count = ::Katello::Repository.where.not(version_href: nil).count
=> 0
irb(main):033:0> migratable_repo_count = ::Katello::Repository.count - ::Katello::Repository.puppet_type.count -
irb(main):034:0*   ::Katello::Repository.ostree_type.count - ::Katello::Repository.deb_type.count
=> 257
irb(main):035:0>
irb(main):036:0> on_demand_rpm_count = Katello::RepositoryRpm.where(:repository_id => Katello::Repository.yum_type.on_demand).distinct.count
=> 846590
irb(main):037:0> on_demand_unmigrated_rpm_count = on_demand_rpm_count - migrated_rpm_count
=> 846590
irb(main):038:0> immediate_unmigrated_rpm_count = ::Katello::Rpm.count - migrated_rpm_count - on_demand_unmigrated_rpm_count
=> -624067
irb(main):039:0>
irb(main):040:0> # On Demand RPMs: (6.46E-04)*(#RPMs) + -3.22
=> nil
irb(main):041:0> # Immediate RPMs: (9.39E-04)*(#RPMs) + -3
=> nil
irb(main):042:0> # Repositories: 0.0746*(#Repos) + -2.07
=> nil
irb(main):043:0> migration_minutes = (0.000646 * on_demand_unmigrated_rpm_count - 3.22 +
irb(main):044:1*   0.000943 * immediate_unmigrated_rpm_count - 3 +
irb(main):045:1*   0.0746 * migratable_repo_count).to_i
=> -28
irb(main):046:0> hours = (migration_minutes / 60) % 60
=> 59
irb(main):047:0> minutes = migration_minutes % 60
=> 32
irb(main):048:0>
irb(main):049:0> puts "============Migration Summary================"
============Migration Summary================
=> nil
irb(main):050:0> puts "Migrated/Total RPMs: #{migrated_rpm_count}/#{::Katello::Rpm.count}"
Migrated/Total RPMs: 0/222523
=> nil
irb(main):051:0> puts "Migrated/Total errata: #{migrated_erratum_count}/#{::Katello::RepositoryErratum.count}"
Migrated/Total errata: 0/346293
=> nil
irb(main):052:0> puts "Migrated/Total repositories: #{migrated_repo_count}/#{migratable_repo_count}"
Migrated/Total repositories: 0/257
=> nil
irb(main):053:0> puts

=> nil
irb(main):054:0> # The timing formulas go negative if the amount of content is negligibly small
=> nil
irb(main):055:0> if migration_minutes >= 5
irb(main):056:1>   puts "Estimated migration time based on yum content: #{hours} hours, #{minutes} minutes"
irb(main):057:1> else
irb(main):058:1>   puts "Estimated migration time based on yum content: fewer than 5 minutes"
irb(main):059:1> end
Estimated migration time based on yum content: fewer than 5 minutes
=> nil

FYI yesterday’s content prepare thing never moved after 24 hours… I finally killed it.

Yes did that once I figured out what you guys wanted. Output posted above, thanks! Sorry for the confusion.

@caseybea thanks for running those lines. We caught a sneaky bug with the timing thanks to it.

In the pulp3_migration_stats.rake file in your Katello source code, if you replace this line (katello/pulp3_migration_stats.rake at KATELLO-3.18 · Katello/katello · GitHub) with the following, your timing estimate should look better:

on_demand_rpm_count = Katello::RepositoryRpm.where(:repository_id => Katello::Repository.yum_type.on_demand).select(:rpm_id).distinct.count
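
That also explains the odd numbers in your console run: the old query effectively counts repository/RPM rows rather than unique RPMs, which is why on_demand_rpm_count came back as 846590 even though you only have 222523 RPMs in total. From there, immediate_unmigrated_rpm_count = 222523 - 0 - 846590 = -624067, and migration_minutes dropped to -28. Counting distinct rpm_id values keeps the on-demand figure at or below the total RPM count, so the estimate should no longer go negative.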

As for the stuck migration, we need to take a look at the migration task from Pulp itself.
When you run your migration again, and it’s at the part where it seems to stop progressing, can you do the following:

  1. Look at the task and find the Actions::Pulp3::ContentMigration action. In the "Output", there should be at least one bit of JSON that looks like "pulp_tasks" => {"pulp_href" => "<href>" ... just like in your comment here. Grab the pulp_href(s).
  2. Use curl to get the status of the task (or tasks) and paste them here:
curl https://`hostname`<pulp_href>   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key

for example

curl https://`hostname`/pulp/api/v3/tasks/993f18f0-88dc-4aed-b9d6-796c38c7ce6a/   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key

but replace the href above with the href(s) in your stuck migration task.

Also, one thing that helps with the migration sometimes is running foreman-rake katello:delete_orphaned_content before migrating.

I am more concerned as to why it just hangs after starting.

Here is the result of the curl (thank you for the help!). Maybe this will shed some light?

[root@katello ~]# curl https://katello.ctsi.mcw.edu:/pulp/api/v3/tasks/ebebad42-edc0-4688-9277-8ea68af95660/ --cert /etc/pki/katello/certs/pulp-client.crt --key /etc/pki/katello/private/pulp-client.key

{"pulp_href":"/pulp/api/v3/tasks/ebebad42-edc0-4688-9277-8ea68af95660/","pulp_created":"2021-04-28T20:09:45.692402Z","state":"waiting","name":"pulp_2to3_migration.app.tasks.migrate.migrate_from_pulp2","started_at":null,"finished_at":null,"error":null,"worker":"/pulp/api/v3/workers/087d7b44-f073-49ae-8e40-610ccdf818fc/","parent_task":null,"child_tasks":[],"task_group":null,"progress_reports":[],"created_resources":[],"reserved_resources_record":["pulp_2to3_migration"]}[root@katello ~]#
[root@katello ~]#

I am fairly positive I did the orphaned content cleanup, but I'll try again to be certain.

We might be getting somewhere.

I could not find a history of the orphan cleanup, so it's running now. It cruised along for a while, but now it's seemingly "stuck" at 63% progress. The messages log is just a bazillion of these:

Apr 28 15:36:12 katello pulpcore-api: - - [28/Apr/2021:20:36:12 +0000] "GET /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ HTTP/1.1" 200 379 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 28 15:36:28 katello pulpcore-api: - - [28/Apr/2021:20:36:28 +0000] "GET /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ HTTP/1.1" 200 379 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 28 15:36:44 katello pulpcore-api: - - [28/Apr/2021:20:36:44 +0000] "GET /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ HTTP/1.1" 200 379 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 28 15:37:00 katello pulpcore-api: - - [28/Apr/2021:20:37:00 +0000] "GET /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ HTTP/1.1" 200 379 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 28 15:37:16 katello pulpcore-api: - - [28/Apr/2021:20:37:16 +0000] "GET /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ HTTP/1.1" 200 379 "-" "OpenAPI-Generator/3.7.1/ruby"

Could there be some data inconsistencies in my pulp data that are preventing this (the orphan cleanup), and also the more important pulp3 migration, from happening?

I'm open to trying various things to check the data for consistency, etc. Let me know if you think I'm on the right path here. :slight_smile:

@caseybea you might want to curl the /pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ href too. I wonder if your Pulp 3 just isn’t running tasks which is why you’re getting stuck. If orphan cleanup doesn’t complete I’m going to guess this is the case. Your migration definitely shouldn’t be stuck at “waiting”, it should be “running”. I’ll see if the Pulp team has any recommendations for debugging stuck Pulp 3 tasks.
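
In the meantime, one thing that might be worth checking (my suggestion, not something from the Pulp team) is whether the Pulp 3 workers themselves are alive, since a task sits in "waiting" until a worker picks it up. With the same cert/key as the curls above:

# List the Pulp 3 workers and their last heartbeats
curl https://`hostname`/pulp/api/v3/workers/   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key
# And make sure the pulpcore services, including the workers, are actually running
systemctl status 'pulpcore*'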

Here's the curl output (yes, it says waiting). Orphan cleanup still stuck. I'll kill it at the end of the day if it doesn't move…

I also am including a hammer status just for good measure.

[root@katello ~]# curl https://katello.ctsi.mcw.edu:/pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/ --cert /etc/pki/katello/certs/pulp-client.crt --key /etc/pki/katello/private/pulp-client.key
{"pulp_href":"/pulp/api/v3/tasks/d595448f-d74c-43a7-b66d-65c3ba28f57c/","pulp_created":"2021-04-28T20:20:43.534728Z","state":"waiting","name":"pulpcore.app.tasks.orphan.orphan_cleanup","started_at":null,"finished_at":null,"error":null,"worker":null,"parent_task":null,"child_tasks":[],"task_group":null,"progress_reports":[],"created_resources":[],"reserved_resources_record":[]}[root@katello ~]#
[root@katello ~]#
[root@katello ~]# hammer status
Version: 2.3.3
API Version: v2
Database:
    Status: ok
    Server Response: Duration: 0ms
Plugins:
 1) Name:    foreman-tasks
    Version: 3.0.5
 2) Name:    foreman_remote_execution
    Version: 4.2.2
 3) Name:    katello
    Version: 3.18.2.1
Smart Proxies:
 1) Name:    katello.ctsi.mcw.edu
    Version: 2.3.3
    Status:  ok
    Features:
     1) Name:    pulp
        Version: 2.1.0
     2) Name:    pulpcore
        Version: 2.1.0
     3) Name:    dynflow
        Version: 0.3.0
     4) Name:    ssh
        Version: 0.3.1
     5) Name:    templates
        Version: 2.3.3
     6) Name:    tftp
        Version: 2.3.3
     7) Name:    puppetca
        Version: 2.3.3
     8) Name:    puppet
        Version: 2.3.3
     9) Name:    logs
        Version: 2.3.3
    10) Name:    httpboot
        Version: 2.3.3
    11) Name:    registration
        Version: 2.3.3
Compute Resources:

candlepin:
    Status: ok
    Server Response: Duration: 17ms
candlepin_events:
    Status: ok
    message: 2 Processed, 0 Failed
    Server Response: Duration: 0ms
candlepin_auth:
    Status: ok
    Server Response: Duration: 15ms
katello_events:
    Status: ok
    message: 1 Processed, 0 Failed
    Server Response: Duration: 0ms
pulp:
    Status: ok
    Server Response: Duration: 27ms
pulp_auth:
    Status: ok
    Server Response: Duration: 12ms
pulp3:
    Status: ok
    Server Response: Duration: 35ms
foreman_tasks:
    Status: ok
    Server Response: Duration: 2ms

[root@katello ~]#

I truly appreciate all the help! I look forward to seeing what you hear back. Thank you so much.

@iballou So just to confirm, the orphan cleanup never completed, it got stuck right away as previously noted. So at present, I cannot run that, and I cannot run the “content prepare” as we know. Let me know what you hear from the pulp team, thank you!

Just confirming where we’re at.

I did try one extra thing, which was running db.repairDatabase() on the Pulp 2 MongoDB database. It executed just fine, no errors. But it did not make a difference; both the orphan cleanup and the content prepare still get stuck. It was worth a shot!

@caseybea so if a task is waiting in Pulp 3, that means either something else is running and taking up all of the task time (maybe something is stuck?) or there might be something else relating to tasks orphaned in the Pulp 3 database.

To check for other running tasks, curl the following:

curl https://`hostname`/pulp/api/v3/tasks/?state=running   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key

If anything comes up, we should kill it like so:

curl --request PATCH --header "Content-Type: application/json" --data '{ "state": "canceled" }' https://`hostname`/pulp/api/v3/tasks/<task_id>/   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key

If that doesn’t work, the last option I can think of for now would be to drop the Pulp 3 database, re-create it, and re-migrate it. There may be a simpler way, but I’ll have to check. We’ll get to that after we try checking the tasks.

booooo (not you, but my situation- ha!)

Did the curl check, and… nothing:

{"count":0,"next":null,"previous":null,"results":[]}

I of course am more than willing to try the DB drop as suggested. Let me know what you wish me to try!

(and again, thank you SO much for the continued help. I of course am hoping to resolve my own situation, but I suspect that as more and more people get closer to Katello 4 and the required pulp3 migration, there are probably a few other folks who may end up with the same issue)

@caseybea no problem! The more issues we solve here the better for the future.

The Pulp team gave me a command to try before we drop the entire DB:

sudo systemctl stop pulpcore* --all
sudo -u pulp PULP_SETTINGS='/etc/pulp/settings.py'  /usr/bin/pulpcore-manager shell -c "import pulpcore; pulpcore.app.models.ReservedResource.objects.all().delete()"
sudo systemctl restart pulpcore* --all

For other people joining us, please only run the above command if you have not run the Pulp 3 switchover and are willing to have to run through the full Pulp 3 migration again. It is a dangerous operation!

Then, I’d say try running orphan cleanup because it should take much less time than the migration. If orphan cleanup doesn’t get stuck, you should be good to run the migration.
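
For reference, that's the same orphan cleanup task mentioned earlier in the thread, and afterwards you can re-check that nothing is still sitting in the waiting state (same cert/key as before; the tasks API accepts a state filter, as in the state=running example above):

foreman-rake katello:delete_orphaned_content
# Optional: confirm no Pulp 3 tasks are left in "waiting"
curl https://`hostname`/pulp/api/v3/tasks/?state=waiting   --cert /etc/pki/katello/certs/pulp-client.crt  --key /etc/pki/katello/private/pulp-client.key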

If orphan cleanup gets stuck, here are the commands to reset the database:

sudo systemctl stop pulpcore*
sudo su - postgres
dropdb pulpcore
createdb pulpcore
exit
cd /tmp
sudo -u pulp PULP_SETTINGS='/etc/pulp/settings.py' DJANGO_SETTINGS_MODULE='pulpcore.app.settings' /usr/bin/pulpcore-manager  migrate --no-input
sudo -u pulp PULP_SETTINGS='/etc/pulp/settings.py' DJANGO_SETTINGS_MODULE='pulpcore.app.settings'  /usr/bin/pulpcore-manager reset-admin-password --password <some random password>
sudo systemctl restart pulpcore*

Aha! I think you have finally removed the thorn from the lion’s paw.

After the first command (the object delete one), the orphan cleanup operation completed successfully. I have now begun the "content prepare" operation. It is moving along AND I'm actually getting output.

I will report back after this too has completed and I've attempted the actual migration command.

To clarify for anyone else watching, I only did the first command; I did NOT have to delete the entire database as noted above "if the first command didn't work".

Glad to hear that pulpcore-manager shell command helped! That’s a bug that the Pulp team fixed only recently, which explains why it’s happening in 3.18. Let us know if you have any other issues.

Thanks @iballou for looking at this. I marked your comment with the steps as the solution.

OK, well certainly a lot of progress, but not out of the woods yet.

As the content prepare got to the end of the RPMs, it failed with this error:

2021-04-29 15:36:30 -0500: Migrating rpm content to Pulp 3 erratum 2997/234938Migration failed, You will want to investigate: https://katello.ctsi.mcw.edu/foreman_tasks/tasks/c3ace595-9285-49e9-adf3-6acac5daccdc
rake aborted!
ForemanTasks::TaskError: Task c3ace595-9285-49e9-adf3-6acac5daccdc: Katello::Errors::Pulp3Error: No declared artifact with relative path ".treeinfo" for content "<DistributionTree: pk=26685499-74b8-4337-adc6-5bb2d5325ae3>"
/opt/theforeman/tfm/root/usr/share/gems/gems/katello-3.18.2.1/lib/katello/tasks/pulp3_migration.rake:33:in `block (2 levels) in <top (required)>'
/opt/rh/rh-ruby25/root/usr/share/gems/gems/rake-12.3.0/exe/rake:27:in `<top (required)>'
Tasks: TOP => katello:pulp3_migration
(See full trace by running task with --trace)
                                                                      [FAIL]
Failed executing foreman-rake katello:pulp3_migration, exit status 1
--------------------------------------------------------------------------------
Scenario [Prepare content for Pulp 3] failed.

The following steps ended up in failing state:

  [content-prepare]

Resolve the failed steps and rerun
the command. In case the failures are false positives,
use --whitelist="content-prepare"

The error in the task monitor was:

No declared artifact with relative path ".treeinfo" for content "<DistributionTree: pk=26685499-74b8-4337-adc6-5bb2d5325ae3>"

And here is the /var/log/messages content from the time of the abort:

Apr 29 15:36:01 katello pulpcore-api: - - [29/Apr/2021:20:36:01 +0000] "GET /pulp/api/v3/tasks/9bedaf30-97ad-417c-a16a-11a037f76ce2/ HTTP/1.1" 200 7017 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:36:01 katello pulpcore-api: - - [29/Apr/2021:20:36:01 +0000] "GET /pulp/api/v3/task-groups/211a1157-9f21-40d2-ad0e-dc89b54e91a3/ HTTP/1.1" 200 440 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:36:17 katello pulpcore-api: - - [29/Apr/2021:20:36:17 +0000] "GET /pulp/api/v3/tasks/9bedaf30-97ad-417c-a16a-11a037f76ce2/ HTTP/1.1" 200 7020 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:36:17 katello pulpcore-api: - - [29/Apr/2021:20:36:17 +0000] "GET /pulp/api/v3/task-groups/211a1157-9f21-40d2-ad0e-dc89b54e91a3/ HTTP/1.1" 200 440 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:36:25 katello pulpcore-worker-8: pulp: rq.worker:ERROR: Traceback (most recent call last):
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/worker.py", line 936, in perform_job
Apr 29 15:36:25 katello pulpcore-worker-8: rv = job.perform()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/job.py", line 684, in perform
Apr 29 15:36:25 katello pulpcore-worker-8: self._result = self._execute()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/job.py", line 690, in _execute
Apr 29 15:36:25 katello pulpcore-worker-8: return self.func(*self.args, **self.kwargs)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/tasks/migrate.py", line 81, in migrate_from_pulp2
Apr 29 15:36:25 katello pulpcore-worker-8: migrate_content(plan, skip_corrupted=skip_corrupted)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/migration.py", line 47, in migrate_content
Apr 29 15:36:25 katello pulpcore-worker-8: plugin.migrator.migrate_content_to_pulp3(skip_corrupted=skip_corrupted)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/plugin/rpm/migrator.py", line 145, in migrate_content_to_pulp3
Apr 29 15:36:25 katello pulpcore-worker-8: loop.run_until_complete(dm.create())
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib64/python3.6/asyncio/base_events.py", line 484, in run_until_complete
Apr 29 15:36:25 katello pulpcore-worker-8: return future.result()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/plugin/content.py", line 89, in create
Apr 29 15:36:25 katello pulpcore-worker-8: await pipeline
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/api.py", line 225, in create_pipeline
Apr 29 15:36:25 katello pulpcore-worker-8: await asyncio.gather(*futures)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/api.py", line 43, in __call__
Apr 29 15:36:25 katello pulpcore-worker-8: await self.run()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/artifact_stages.py", line 244, in run
Apr 29 15:36:25 katello pulpcore-worker-8: RemoteArtifact.objects.bulk_get_or_create(self._needed_remote_artifacts(batch))
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/artifact_stages.py", line 301, in _needed_remote_artifacts
Apr 29 15:36:25 katello pulpcore-worker-8: msg.format(rp=content_artifact.relative_path, c=d_content.content)
Apr 29 15:36:25 katello pulpcore-worker-8: ValueError: No declared artifact with relative path ".treeinfo" for content "<DistributionTree: pk=26685499-74b8-4337-adc6-5bb2d5325ae3>"
Apr 29 15:36:25 katello pulpcore-worker-8: Traceback (most recent call last):
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/worker.py", line 936, in perform_job
Apr 29 15:36:25 katello pulpcore-worker-8: rv = job.perform()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/job.py", line 684, in perform
Apr 29 15:36:25 katello pulpcore-worker-8: self._result = self._execute()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/rq/job.py", line 690, in _execute
Apr 29 15:36:25 katello pulpcore-worker-8: return self.func(*self.args, **self.kwargs)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/tasks/migrate.py", line 81, in migrate_from_pulp2
Apr 29 15:36:25 katello pulpcore-worker-8: migrate_content(plan, skip_corrupted=skip_corrupted)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/migration.py", line 47, in migrate_content
Apr 29 15:36:25 katello pulpcore-worker-8: plugin.migrator.migrate_content_to_pulp3(skip_corrupted=skip_corrupted)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/plugin/rpm/migrator.py", line 145, in migrate_content_to_pulp3
Apr 29 15:36:25 katello pulpcore-worker-8: loop.run_until_complete(dm.create())
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib64/python3.6/asyncio/base_events.py", line 484, in run_until_complete
Apr 29 15:36:25 katello pulpcore-worker-8: return future.result()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulp_2to3_migration/app/plugin/content.py", line 89, in create
Apr 29 15:36:25 katello pulpcore-worker-8: await pipeline
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/api.py", line 225, in create_pipeline
Apr 29 15:36:25 katello pulpcore-worker-8: await asyncio.gather(*futures)
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/api.py", line 43, in __call__
Apr 29 15:36:25 katello pulpcore-worker-8: await self.run()
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/artifact_stages.py", line 244, in run
Apr 29 15:36:25 katello pulpcore-worker-8: RemoteArtifact.objects.bulk_get_or_create(self._needed_remote_artifacts(batch))
Apr 29 15:36:25 katello pulpcore-worker-8: File "/usr/lib/python3.6/site-packages/pulpcore/plugin/stages/artifact_stages.py", line 301, in _needed_remote_artifacts
Apr 29 15:36:25 katello pulpcore-worker-8: msg.format(rp=content_artifact.relative_path, c=d_content.content)
Apr 29 15:36:25 katello pulpcore-worker-8: ValueError: No declared artifact with relative path ".treeinfo" for content "<DistributionTree: pk=26685499-74b8-4337-adc6-5bb2d5325ae3>"
Apr 29 15:36:26 katello pulpcore-worker-8: pulp: rq.worker:INFO: Cleaning registries for queue: 46938@katello.ctsi.mcw.edu
Apr 29 15:36:26 katello pulpcore-worker-8: pulp: rq.worker:INFO: 46938@katello.ctsi.mcw.edu: 94e7cb44-25cd-4a47-a067-3f8f241fcf12
Apr 29 15:36:26 katello pulpcore-worker-8: pulp: rq.worker:INFO: 46938@katello.ctsi.mcw.edu: Job OK (94e7cb44-25cd-4a47-a067-3f8f241fcf12)
Apr 29 15:36:34 katello pulpcore-api: - - [29/Apr/2021:20:36:34 +0000] "GET /pulp/api/v3/tasks/9bedaf30-97ad-417c-a16a-11a037f76ce2/ HTTP/1.1" 200 8979 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:36:34 katello pulpcore-api: - - [29/Apr/2021:20:36:34 +0000] "GET /pulp/api/v3/task-groups/211a1157-9f21-40d2-ad0e-dc89b54e91a3/ HTTP/1.1" 200 440 "-" "OpenAPI-Generator/3.7.1/ruby"
Apr 29 15:39:31 katello pulp: celery.beat:INFO: Scheduler: Sending due task download_deferred_content (pulp.server.controllers.repository.queue_download_deferred)
Apr 29 15:39:31 katello pulp: celery.worker.strategy:INFO: Received task: pulp.server.controllers.repository.queue_download_deferred[7a750e82-7138-43d8-983b-fe0ba643eff0]
Apr 29 15:39:31 katello pulp: celery.app.trace:INFO: [7a750e82] Task pulp.server.controllers.repository.queue_download_deferred[7a750e82-7138-43d8-983b-fe0ba643eff0] succeeded in 0.00519433498266s: None
Apr 29 15:39:31 katello pulp: celery.worker.strategy:INFO: Received task: pulp.server.controllers.repository.download_deferred[d50c4d29-3fcf-4790-8740-c05990d89db5]
Apr 29 15:39:32 katello pulp: celery.app.trace:INFO: [d50c4d29] Task pulp.server.controllers.repository.download_deferred[d50c4d29-3fcf-4790-8740-c05990d89db5] succeeded in 1.007954431s: None
Apr 29 15:39:59 katello pulpcore-worker-7: pulp: rq.worker:INFO: Cleaning registries for queue: 46941@katello.ctsi.mcw.edu
Apr 29 15:40:00 katello pulpcore-worker-6: pulp: rq.worker:INFO: Cleaning registries for queue: 46942@katello.ctsi.mcw.edu
Apr 29 15:40:00 katello pulpcore-worker-2: pulp: rq.worker:INFO: Cleaning registries for queue: 46939@katello.ctsi.mcw.edu
Apr 29 15:40:00 katello pulpcore-worker-1: pulp: rq.worker:INFO: Cleaning registries for queue: 46935@katello.ctsi.mcw.edu

Same error I have: [ContentMigration] Katello::Errors::Pulp3Error

Do you have CentOS 8 Stream repositories?