From 821f8c1483c8746f8276601f30e969a23389f41c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 4 Feb 2025 10:33:19 +0100 Subject: [PATCH 001/108] Clean up exploration class. --- src/search/landmarks/exploration.cc | 193 ++++++++++++++++------------ src/search/landmarks/exploration.h | 15 ++- 2 files changed, 127 insertions(+), 81 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 2efd8c23d3..a6d53475ff 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -3,13 +3,11 @@ #include "util.h" #include "../task_utils/task_properties.h" -#include "../utils/collections.h" #include "../utils/hash.h" #include "../utils/logging.h" #include #include -#include using namespace std; @@ -30,30 +28,42 @@ Exploration::Exploration(const TaskProxy &task_proxy, utils::LogProxy &log) log << "Initializing Exploration..." << endl; } - // Build propositions. + build_propositions(); + build_unary_operators(); +} + +void Exploration::build_propositions() { for (VariableProxy var : task_proxy.get_variables()) { - int var_id = var.get_id(); - propositions.push_back(vector(var.get_domain_size())); + const int var_id = var.get_id(); + propositions.emplace_back(var.get_domain_size()); for (int value = 0; value < var.get_domain_size(); ++value) { propositions[var_id][value].fact = FactPair(var_id, value); } } +} +static int compute_number_of_unary_operators( + const OperatorsProxy &operators, const AxiomsProxy &axioms) { + int num_unary_ops = 0; + for (OperatorProxy op : operators) { + num_unary_ops += static_cast(op.get_effects().size()); + } + for (OperatorProxy axiom : axioms) { + num_unary_ops += static_cast(axiom.get_effects().size()); + } + return num_unary_ops; +} + +void Exploration::build_unary_operators() { /* Reserve vector size unary operators. 
This is needed because we cross-reference to the memory address of elements of the vector while building it; meaning a resize would invalidate all references. */ - int num_unary_ops = 0; OperatorsProxy operators = task_proxy.get_operators(); AxiomsProxy axioms = task_proxy.get_axioms(); - for (OperatorProxy op : operators) { - num_unary_ops += op.get_effects().size(); - } - for (OperatorProxy axiom : axioms) { - num_unary_ops += axiom.get_effects().size(); - } - unary_operators.reserve(num_unary_ops); + unary_operators.reserve( + compute_number_of_unary_operators(operators, axioms)); // Build unary operators for operators and axioms. for (OperatorProxy op : operators) @@ -62,84 +72,78 @@ Exploration::Exploration(const TaskProxy &task_proxy, utils::LogProxy &log) build_unary_operators(axiom); } +vector Exploration::get_sorted_precondition_propositions( + const vector &preconditions, const EffectProxy &effect) { + vector extended_preconditions(preconditions); + const EffectConditionsProxy &effect_conditions = effect.get_conditions(); + for (FactProxy effect_condition : effect_conditions) { + extended_preconditions.push_back(effect_condition.get_pair()); + } + + sort(extended_preconditions.begin(), extended_preconditions.end()); + + vector precondition_propositions; + for (const FactPair &precondition_fact : extended_preconditions) { + precondition_propositions.push_back( + &propositions[precondition_fact.var][precondition_fact.value]); + } + return precondition_propositions; +} + void Exploration::build_unary_operators(const OperatorProxy &op) { - // Note: changed from the original to allow sorting of operator conditions - vector precondition; - vector precondition_facts1; + vector preconditions; + int op_or_axiom_id = get_operator_or_axiom_id(op); for (FactProxy pre : op.get_preconditions()) { - precondition_facts1.push_back(pre.get_pair()); + preconditions.push_back(pre.get_pair()); } for (EffectProxy effect : op.get_effects()) { - vector 
precondition_facts2(precondition_facts1); - EffectConditionsProxy effect_conditions = effect.get_conditions(); - for (FactProxy effect_condition : effect_conditions) { - precondition_facts2.push_back(effect_condition.get_pair()); - } - - sort(precondition_facts2.begin(), precondition_facts2.end()); - - for (const FactPair &precondition_fact : precondition_facts2) - precondition.push_back(&propositions[precondition_fact.var] - [precondition_fact.value]); - + vector precondition_propositions = + get_sorted_precondition_propositions(preconditions, effect); FactProxy effect_fact = effect.get_fact(); - Proposition *effect_proposition = &propositions[effect_fact.get_variable().get_id()][effect_fact.get_value()]; - int op_or_axiom_id = get_operator_or_axiom_id(op); - unary_operators.emplace_back(precondition, effect_proposition, op_or_axiom_id); + Proposition *effect_proposition = &propositions[ + effect_fact.get_variable().get_id()][effect_fact.get_value()]; + unary_operators.emplace_back( + precondition_propositions, effect_proposition, op_or_axiom_id); // Cross-reference unary operators. - for (Proposition *pre : precondition) { + for (Proposition *pre : precondition_propositions) { pre->precondition_of.push_back(&unary_operators.back()); } - - precondition.clear(); - precondition_facts2.clear(); } } -/* - This function initializes the priority queue and the information associated - with propositions and unary operators for the relaxed exploration. Unary - operators that are not allowed to be applied due to exclusions are marked by - setting their *excluded* flag. -*/ -void Exploration::setup_exploration_queue( - const State &state, const vector &excluded_props, - const vector &excluded_op_ids) { - prop_queue.clear(); - - // Reset reachability information. 
+void Exploration::reset_reachability_information() { for (auto &propositions_for_variable : propositions) { for (auto &prop : propositions_for_variable) { prop.reached = false; } } +} - for (const FactPair &fact : excluded_props) { - propositions[fact.var][fact.value].excluded = true; - } - - // Set facts that are true in the current state as reached. +void Exploration::set_state_facts_reached(const State &state) { for (FactProxy fact : state) { Proposition *init_prop = &propositions[fact.get_variable().get_id()][fact.get_value()]; enqueue_if_necessary(init_prop); } +} - /* - Unary operators derived from operators that are excluded or achieve - an excluded proposition *unconditionally* must be marked as excluded. - - Note that we in general cannot exclude all unary operators derived from - operators that achieve an excluded propositon *conditionally*: - Given an operator with uncoditional effect e1 and conditional effect e2 - with condition c yields unary operators uo1: {} -> e1 and uo2: c -> e2. - Excluding both would not allow us to achieve e1 when excluding - proposition e2. We instead only mark uo2 as excluded (see below when - looping over all unary operators). Note however that this can lead to - an overapproximation, e.g. if the effect e1 also has condition c. - */ +/* + Unary operators derived from operators that are excluded or achieve an + excluded proposition *unconditionally* must be marked as excluded. + + Note that we in general cannot exclude all unary operators derived from + operators that achieve an excluded propositon *conditionally*: + Given an operator with uncoditional effect e1 and conditional effect e2 with + condition c yields unary operators uo1: {} -> e1 and uo2: c -> e2. Excluding + both would not allow us to achieve e1 when excluding proposition e2. We + instead only mark uo2 as excluded (see in *initialize_operator_data* when + looping over all unary operators). Note however that this can lead to an + overapproximation, e.g. 
if the effect e1 also has condition c. +*/ +unordered_set Exploration::get_excluded_operators( + const vector &excluded_op_ids) const { unordered_set op_ids_to_mark(excluded_op_ids.begin(), excluded_op_ids.end()); for (OperatorProxy op : task_proxy.get_operators()) { @@ -152,28 +156,56 @@ void Exploration::setup_exploration_queue( } } } + return op_ids_to_mark; +} + +void Exploration::initialize_operator_data( + const vector &excluded_op_ids) { + const unordered_set op_ids_to_mark = + get_excluded_operators(excluded_op_ids); - // Initialize operator data, queue effects of precondition-free operators. for (UnaryOperator &op : unary_operators) { op.unsatisfied_preconditions = op.num_preconditions; /* Aside from UnaryOperators derived from operators with an id in op_ids_to_mark we also exclude UnaryOperators that have an excluded - proposition as effect (see comment when building *op_ids_to_mark*). + proposition as effect (see comment for *get_excluded_operators*). */ - if (op.effect->excluded - || op_ids_to_mark.count(op.op_or_axiom_id)) { + if (op.effect->excluded || op_ids_to_mark.contains(op.op_or_axiom_id)) { // Operator will not be applied during relaxed exploration. op.excluded = true; continue; } op.excluded = false; // Reset from previous exploration. + // Queue effects of precondition-free operators. if (op.unsatisfied_preconditions == 0) { enqueue_if_necessary(op.effect); } } +} + +/* + This function initializes the priority queue and the information associated + with propositions and unary operators for the relaxed exploration. Unary + operators that are not allowed to be applied due to exclusions are marked by + setting their *excluded* flag. +*/ +void Exploration::setup_exploration_queue( + const State &state, const vector &excluded_props, + const vector &excluded_op_ids) { + prop_queue.clear(); + + reset_reachability_information(); + + // Set *excluded* to true for initializing operator data. 
+ for (const FactPair &fact : excluded_props) { + propositions[fact.var][fact.value].excluded = true; + } + + set_state_facts_reached(state); + initialize_operator_data(excluded_op_ids); // Reset *excluded* to false for the next exploration. for (const FactPair &fact : excluded_props) { @@ -186,7 +218,8 @@ void Exploration::relaxed_exploration() { Proposition *prop = prop_queue.front(); prop_queue.pop_front(); - const vector &triggered_operators = prop->precondition_of; + const vector &triggered_operators = + prop->precondition_of; for (UnaryOperator *unary_op : triggered_operators) { if (unary_op->excluded) continue; @@ -206,14 +239,7 @@ void Exploration::enqueue_if_necessary(Proposition *prop) { } } -vector> Exploration::compute_relaxed_reachability( - const vector &excluded_props, - const vector &excluded_op_ids) { - setup_exploration_queue(task_proxy.get_initial_state(), - excluded_props, excluded_op_ids); - relaxed_exploration(); - - // Bundle reachability information into the return data structure. 
+vector> Exploration::bundle_reachability_information() const { vector> reached; reached.resize(propositions.size()); for (size_t var_id = 0; var_id < propositions.size(); ++var_id) { @@ -226,4 +252,13 @@ vector> Exploration::compute_relaxed_reachability( } return reached; } + +vector> Exploration::compute_relaxed_reachability( + const vector &excluded_props, + const vector &excluded_op_ids) { + setup_exploration_queue(task_proxy.get_initial_state(), + excluded_props, excluded_op_ids); + relaxed_exploration(); + return bundle_reachability_information(); +} } diff --git a/src/search/landmarks/exploration.h b/src/search/landmarks/exploration.h index 8552bba73a..4e46a2d908 100644 --- a/src/search/landmarks/exploration.h +++ b/src/search/landmarks/exploration.h @@ -7,7 +7,6 @@ #include "../algorithms/priority_queues.h" -#include #include #include @@ -59,19 +58,31 @@ class Exploration { std::vector> propositions; std::deque prop_queue; + void build_propositions(); + void build_unary_operators(); + std::vector get_sorted_precondition_propositions( + const std::vector &preconditions, const EffectProxy &effect); void build_unary_operators(const OperatorProxy &op); + + void reset_reachability_information(); + void set_state_facts_reached(const State &state); + std::unordered_set get_excluded_operators( + const std::vector &excluded_op_ids) const; + void initialize_operator_data(const std::vector &excluded_op_ids); void setup_exploration_queue( const State &state, const std::vector &excluded_props, const std::vector &excluded_op_ids); void relaxed_exploration(); void enqueue_if_necessary(Proposition *prop); + + std::vector> bundle_reachability_information() const; public: Exploration(const TaskProxy &task_proxy, utils::LogProxy &log); /* Computes the reachability of each proposition when excluding operators in *excluded_op_ids* and ensuring that propositions - in *excluded_pros* are not achieved. + in *excluded_props* are not achieved. 
The returned vector of vector denotes for each proposition (grouped by their fact variable) whether it is relaxed reachable. The values are exact in the absence of conditional effects, otherwise From c93adef796989a5d04aea2f7d4184d3f43eea1a6 Mon Sep 17 00:00:00 2001 From: Remo Christen Date: Wed, 5 Feb 2025 15:42:42 +0100 Subject: [PATCH 002/108] Make landmark graph iterable and rename landmark members. --- .../cartesian_abstractions/utils_landmarks.cc | 10 +-- src/search/landmarks/exploration.cc | 1 + src/search/landmarks/landmark.cc | 21 ++---- src/search/landmarks/landmark.h | 24 +++--- .../landmark_cost_partitioning_algorithms.cc | 7 +- src/search/landmarks/landmark_factory.cc | 2 +- src/search/landmarks/landmark_factory_h_m.cc | 10 +-- .../landmarks/landmark_factory_merged.cc | 21 +++--- .../landmark_factory_reasonable_orders_hps.cc | 18 ++--- .../landmarks/landmark_factory_relaxation.cc | 4 +- .../landmarks/landmark_factory_rpg_sasp.cc | 6 +- .../landmarks/landmark_factory_rpg_sasp.h | 2 +- src/search/landmarks/landmark_graph.cc | 48 +++++++----- src/search/landmarks/landmark_graph.h | 73 +++++++++++-------- src/search/landmarks/landmark_heuristic.cc | 6 +- .../landmarks/landmark_status_manager.cc | 36 ++++----- .../landmarks/landmark_status_manager.h | 6 +- .../landmarks/landmark_sum_heuristic.cc | 2 +- src/search/landmarks/util.cc | 6 +- 19 files changed, 159 insertions(+), 144 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index dec84a3adc..4f54db31ad 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -29,9 +29,8 @@ shared_ptr get_landmark_graph( vector get_fact_landmarks(const LandmarkGraph &graph) { vector facts; - const LandmarkGraph::Nodes &nodes = graph.get_nodes(); - facts.reserve(nodes.size()); - for (auto &node : nodes) { + facts.reserve(graph.get_num_landmarks()); + for (const auto &node 
: graph) { facts.push_back(get_fact(node->get_landmark())); } sort(facts.begin(), facts.end()); @@ -40,13 +39,12 @@ vector get_fact_landmarks(const LandmarkGraph &graph) { utils::HashMap get_fact_to_landmark_map( const shared_ptr &graph) { - const LandmarkGraph::Nodes &nodes = graph->get_nodes(); // All landmarks are simple, i.e., each has exactly one fact. - assert(all_of(nodes.begin(), nodes.end(), [](auto &node) { + assert(all_of(graph->begin(), graph->end(), [](auto &node) { return node->get_landmark().facts.size() == 1; })); utils::HashMap fact_to_landmark_map; - for (auto &node : nodes) { + for (const auto &node : *graph) { const FactPair &fact = node->get_landmark().facts[0]; fact_to_landmark_map[fact] = node.get(); } diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index a6d53475ff..411f29dad9 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -83,6 +83,7 @@ vector Exploration::get_sorted_precondition_propositions( sort(extended_preconditions.begin(), extended_preconditions.end()); vector precondition_propositions; + precondition_propositions.reserve(extended_preconditions.size()); for (const FactPair &precondition_fact : extended_preconditions) { precondition_propositions.push_back( &propositions[precondition_fact.var][precondition_fact.value]); diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 4056858c9a..ed3c3f7f2e 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -4,21 +4,14 @@ using namespace std; namespace landmarks { bool Landmark::is_true_in_state(const State &state) const { - if (disjunctive) { - for (const FactPair &fact : facts) { - if (state[fact.var].get_value() == fact.value) { - return true; - } - } - return false; + auto is_fact_true_in_state = [&](const FactPair &fact) { + return state[fact.var].get_value() == fact.value; + }; + if (is_disjunctive) { + return any_of(facts.cbegin(), 
facts.cend(), is_fact_true_in_state); } else { - // conjunctive or simple - for (const FactPair &fact : facts) { - if (state[fact.var].get_value() != fact.value) { - return false; - } - } - return true; + // Is conjunctive or simple. + return all_of(facts.cbegin(), facts.cend(), is_fact_true_in_state); } } } diff --git a/src/search/landmarks/landmark.h b/src/search/landmarks/landmark.h index 7473f61672..380ae93029 100644 --- a/src/search/landmarks/landmark.h +++ b/src/search/landmarks/landmark.h @@ -8,26 +8,28 @@ namespace landmarks { class Landmark { public: - Landmark(std::vector _facts, bool disjunctive, bool conjunctive, - bool is_true_in_goal = false, bool is_derived = false) - : facts(move(_facts)), disjunctive(disjunctive), conjunctive(conjunctive), - is_true_in_goal(is_true_in_goal), is_derived(is_derived) { - assert(!(conjunctive && disjunctive)); - assert((conjunctive && facts.size() > 1) - || (disjunctive && facts.size() > 1) || facts.size() == 1); + Landmark(std::vector _facts, bool is_disjunctive, + bool is_conjunctive, bool is_true_in_goal = false, + bool is_derived = false) + : facts(move(_facts)), is_disjunctive(is_disjunctive), + is_conjunctive(is_conjunctive), is_true_in_goal(is_true_in_goal), + is_derived(is_derived) { + assert(!(is_conjunctive && is_disjunctive)); + assert((is_conjunctive && facts.size() > 1) || + (is_disjunctive && facts.size() > 1) || facts.size() == 1); } - bool operator ==(const Landmark &other) const { + bool operator==(const Landmark &other) const { return this == &other; } - bool operator !=(const Landmark &other) const { + bool operator!=(const Landmark &other) const { return !(*this == other); } std::vector facts; - bool disjunctive; - bool conjunctive; + const bool is_disjunctive; + const bool is_conjunctive; bool is_true_in_goal; bool is_derived; diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index b669b7e785..22ae19d967 100644 
--- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -46,7 +46,6 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( vector achieved_lms_by_op(operator_costs.size(), 0); vector action_landmarks(operator_costs.size(), false); - const LandmarkGraph::Nodes &nodes = lm_graph.get_nodes(); ConstBitsetView past = lm_status_manager.get_past_landmarks(ancestor_state); ConstBitsetView future = @@ -57,7 +56,7 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( /* First pass: compute which op achieves how many landmarks. Along the way, mark action landmarks and add their cost to h. */ - for (auto &node : nodes) { + for (const auto &node : lm_graph) { int id = node->get_id(); if (future.test(id)) { const unordered_set &achievers = @@ -84,13 +83,13 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( /* TODO: Replace with Landmarks (to do so, we need some way to access the status of a Landmark without access to the ID, which is part of LandmarkNode). */ - vector relevant_lms; + vector relevant_lms; /* Second pass: remove landmarks from consideration that are covered by an action landmark; decrease the counters accordingly so that no unnecessary cost is assigned to these landmarks. */ - for (auto &node : nodes) { + for (const auto &node : lm_graph) { int id = node->get_id(); if (future.test(id)) { const unordered_set &achievers = diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 8df5978aa5..408b79f518 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -129,7 +129,7 @@ void LandmarkFactory::discard_all_orderings() { if (log.is_at_least_normal()) { log << "Removing all orderings." 
<< endl; } - for (auto &node : lm_graph->get_nodes()) { + for (const auto &node : *lm_graph) { node->children.clear(); node->parents.clear(); } diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 621b015cc1..d3ceb15aed 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -620,7 +620,7 @@ void LandmarkFactoryHM::discard_conjunctive_landmarks() { << " conjunctive landmarks" << endl; } lm_graph->remove_node_if( - [](const LandmarkNode &node) {return node.get_landmark().conjunctive;}); + [](const LandmarkNode &node) {return node.get_landmark().is_conjunctive;}); } } @@ -634,7 +634,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { VariablesProxy variables = task_proxy.get_variables(); // first_achievers are already filled in by compute_h_m_landmarks // here only have to do possible_achievers - for (auto &lm_node : lm_graph->get_nodes()) { + for (const auto &lm_node : *lm_graph) { Landmark &landmark = lm_node->get_landmark(); set candidates; // put all possible adders in candidates set @@ -998,10 +998,8 @@ void LandmarkFactoryHM::generate_landmarks( edge_add(*lm_node_table_[lm], *lm_node_table_[set_index], EdgeType::NATURAL); } - if (use_orders) { - for (int gn : h_m_table_[set_index].necessary) { - edge_add(*lm_node_table_[gn], *lm_node_table_[set_index], EdgeType::GREEDY_NECESSARY); - } + for (int gn : h_m_table_[set_index].necessary) { + edge_add(*lm_node_table_[gn], *lm_node_table_[set_index], EdgeType::GREEDY_NECESSARY); } } } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index d838480ecf..a3d1b81b82 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -21,19 +21,19 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( } LandmarkNode *LandmarkFactoryMerged::get_matching_landmark(const Landmark 
&landmark) const { - if (!landmark.disjunctive && !landmark.conjunctive) { + if (!landmark.is_disjunctive && !landmark.is_conjunctive) { const FactPair &lm_fact = landmark.facts[0]; if (lm_graph->contains_simple_landmark(lm_fact)) return &lm_graph->get_simple_landmark(lm_fact); else return nullptr; - } else if (landmark.disjunctive) { + } else if (landmark.is_disjunctive) { set lm_facts(landmark.facts.begin(), landmark.facts.end()); if (lm_graph->contains_identical_disjunctive_landmark(lm_facts)) return &lm_graph->get_disjunctive_landmark(landmark.facts[0]); else return nullptr; - } else if (landmark.conjunctive) { + } else if (landmark.is_conjunctive) { cerr << "Don't know how to handle conjunctive landmarks yet" << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } @@ -58,14 +58,13 @@ void LandmarkFactoryMerged::generate_landmarks( log << "Adding simple landmarks" << endl; } for (size_t i = 0; i < lm_graphs.size(); ++i) { - const LandmarkGraph::Nodes &nodes = lm_graphs[i]->get_nodes(); // TODO: loop over landmarks instead - for (auto &lm_node : nodes) { + for (const auto &lm_node : *lm_graphs[i]) { const Landmark &landmark = lm_node->get_landmark(); - if (landmark.conjunctive) { + if (landmark.is_conjunctive) { cerr << "Don't know how to handle conjunctive landmarks yet" << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); - } else if (landmark.disjunctive) { + } else if (landmark.is_disjunctive) { continue; } else if (!lm_graph->contains_landmark(landmark.facts[0])) { Landmark copy(landmark); @@ -78,10 +77,9 @@ void LandmarkFactoryMerged::generate_landmarks( log << "Adding disjunctive landmarks" << endl; } for (size_t i = 0; i < lm_graphs.size(); ++i) { - const LandmarkGraph::Nodes &nodes = lm_graphs[i]->get_nodes(); - for (auto &lm_node : nodes) { + for (const auto &lm_node : *lm_graphs[i]) { const Landmark &landmark = lm_node->get_landmark(); - if (landmark.disjunctive) { + if (landmark.is_disjunctive) { /* TODO: It seems that disjunctive landmarks 
are only added if none of the facts it is made of is also there as a simple landmark. This should @@ -104,8 +102,7 @@ void LandmarkFactoryMerged::generate_landmarks( log << "Adding orderings" << endl; } for (size_t i = 0; i < lm_graphs.size(); ++i) { - const LandmarkGraph::Nodes &nodes = lm_graphs[i]->get_nodes(); - for (auto &from_orig : nodes) { + for (const auto &from_orig : *lm_graphs[i]) { LandmarkNode *from = get_matching_landmark(from_orig->get_landmark()); if (from) { for (const auto &to : from_orig->children) { diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index f298d7e37c..567fb94668 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -47,15 +47,15 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( */ State initial_state = task_proxy.get_initial_state(); int variables_size = task_proxy.get_variables().size(); - for (auto &node_p : lm_graph->get_nodes()) { + for (const auto &node_p : *lm_graph) { const Landmark &landmark = node_p->get_landmark(); - if (landmark.disjunctive) + if (landmark.is_disjunctive) continue; if (landmark.is_true_in_goal) { - for (auto &node2_p : lm_graph->get_nodes()) { + for (const auto &node2_p : *lm_graph) { const Landmark &landmark2 = node2_p->get_landmark(); - if (landmark == landmark2 || landmark2.disjunctive) + if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { edge_add(*node2_p, *node_p, EdgeType::REASONABLE); @@ -72,7 +72,7 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( for (const auto &p : node2_p.parents) { // find parent LandmarkNode &parent_node = *(p.first); const EdgeType &edge = p.second; - if (parent_node.get_landmark().disjunctive) + if (parent_node.get_landmark().is_disjunctive) continue; if (edge >= 
EdgeType::NATURAL && &parent_node != node_p.get()) { // find predecessors or parent and collect in "interesting nodes" @@ -86,7 +86,7 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( // with node_p. for (LandmarkNode *node2_p : interesting_nodes) { const Landmark &landmark2 = node2_p->get_landmark(); - if (landmark == landmark2 || landmark2.disjunctive) + if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { edge_add(*node2_p, *node_p, EdgeType::REASONABLE); @@ -109,7 +109,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( is the same as 2. */ assert(landmark_a != landmark_b); - assert(!landmark_a.disjunctive && !landmark_b.disjunctive); + assert(!landmark_a.is_disjunctive && !landmark_b.is_disjunctive); VariablesProxy variables = task_proxy.get_variables(); for (const FactPair &lm_fact_b : landmark_b.facts) { @@ -117,7 +117,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( for (const FactPair &lm_fact_a : landmark_a.facts) { FactProxy fact_a = variables[lm_fact_a.var].get_fact(lm_fact_a.value); if (lm_fact_a == lm_fact_b) { - if (!landmark_a.conjunctive || !landmark_b.conjunctive) + if (!landmark_a.is_conjunctive || !landmark_b.is_conjunctive) return false; else continue; @@ -130,7 +130,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( // 2. 
Shared effect e in all operators reaching a, and e, b are mutex // Skip this for conjunctive nodes a, as they are typically achieved through a // sequence of operators successively adding the parts of a - if (landmark_a.conjunctive) + if (landmark_a.is_conjunctive) continue; unordered_map shared_eff; diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 63518d1c73..77ea071293 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -45,7 +45,7 @@ void LandmarkFactoryRelaxation::discard_noncausal_landmarks( bool LandmarkFactoryRelaxation::is_causal_landmark( const TaskProxy &task_proxy, Exploration &exploration, const Landmark &landmark) const { - assert(!landmark.conjunctive); + assert(!landmark.is_conjunctive); if (landmark.is_true_in_goal) return true; @@ -74,7 +74,7 @@ void LandmarkFactoryRelaxation::calc_achievers( const TaskProxy &task_proxy, Exploration &exploration) { assert(!achievers_calculated); VariablesProxy variables = task_proxy.get_variables(); - for (auto &lm_node : lm_graph->get_nodes()) { + for (const auto &lm_node : *lm_graph) { Landmark &landmark = lm_node->get_landmark(); for (const FactPair &lm_fact : landmark.facts) { const vector &ops = get_operators_including_eff(lm_fact); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index bb1b592164..bd879f7a26 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -489,7 +489,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( if lmp is a simple landmark. 
*/ const Landmark &landmark = lmp->get_landmark(); - if (landmark.disjunctive) + if (landmark.is_disjunctive) return; const FactPair &lm_fact = landmark.facts[0]; @@ -603,7 +603,7 @@ void LandmarkFactoryRpgSasp::find_forward_orders(const VariablesProxy &variables } void LandmarkFactoryRpgSasp::add_lm_forward_orders() { - for (auto &node : lm_graph->get_nodes()) { + for (const auto &node : *lm_graph) { for (const auto &node2_pair : forward_orders[node.get()]) { if (lm_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = lm_graph->get_simple_landmark(node2_pair); @@ -626,7 +626,7 @@ void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() { << " disjunctive landmarks" << endl; } lm_graph->remove_node_if( - [](const LandmarkNode &node) {return node.get_landmark().disjunctive;}); + [](const LandmarkNode &node) {return node.get_landmark().is_disjunctive;}); } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index bd86bc0c92..82038f1269 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -15,7 +15,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { std::list open_landmarks; std::vector> disjunction_classes; - std::unordered_map> forward_orders; + std::unordered_map> forward_orders; // dtg_successors[var_id][val] contains all successor values of val in the // domain transition graph for the variable diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index 1e70e41350..daed466a6f 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -19,12 +19,12 @@ LandmarkGraph::LandmarkGraph() int LandmarkGraph::get_num_edges() const { int total = 0; - for (auto &node : nodes) + for (const auto &node : nodes) total += node->children.size(); return total; } -LandmarkNode *LandmarkGraph::get_node(int i) const { +const LandmarkNode 
*LandmarkGraph::get_node(int i) const { return nodes[i].get(); } @@ -86,28 +86,39 @@ bool LandmarkGraph::contains_landmark(const FactPair &lm) const { return contains_simple_landmark(lm) || contains_disjunctive_landmark(lm); } +LandmarkNode *LandmarkGraph::add_node(Landmark &&landmark) { + unique_ptr new_node = + utils::make_unique_ptr(move(landmark)); + nodes.push_back(move(new_node)); + return nodes.back().get(); +} + LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark) { - assert(landmark.conjunctive + assert(landmark.is_conjunctive || all_of(landmark.facts.begin(), landmark.facts.end(), [&](const FactPair &lm_fact) { return !contains_landmark(lm_fact); })); - unique_ptr new_node = - utils::make_unique_ptr(move(landmark)); - LandmarkNode *new_node_p = new_node.get(); + /* + TODO: Avoid having to fetch landmark after moving it. This will only be + possible after removing the assumption that landmarks don't overlap + because we wont need `disjunctive_landmarks_to_nodes` and + `simple_landmarks_to_nodes` anymore. 
+ */ + LandmarkNode *new_node = add_node(move(landmark)); const Landmark &lm = new_node->get_landmark(); - nodes.push_back(move(new_node)); - if (lm.disjunctive) { + + if (lm.is_disjunctive) { for (const FactPair &lm_fact : lm.facts) { - disjunctive_landmarks_to_nodes.emplace(lm_fact, new_node_p); + disjunctive_landmarks_to_nodes.emplace(lm_fact, new_node); } ++num_disjunctive_landmarks; - } else if (lm.conjunctive) { + } else if (lm.is_conjunctive) { ++num_conjunctive_landmarks; } else { - simple_landmarks_to_nodes.emplace(lm.facts.front(), new_node_p); + simple_landmarks_to_nodes.emplace(lm.facts.front(), new_node); } - return *new_node_p; + return *new_node; } void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { @@ -122,12 +133,12 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { assert(child_node.parents.find(node) == child_node.parents.end()); } const Landmark &landmark = node->get_landmark(); - if (landmark.disjunctive) { + if (landmark.is_disjunctive) { --num_disjunctive_landmarks; for (const FactPair &lm_fact : landmark.facts) { disjunctive_landmarks_to_nodes.erase(lm_fact); } - } else if (landmark.conjunctive) { + } else if (landmark.is_conjunctive) { --num_conjunctive_landmarks; } else { simple_landmarks_to_nodes.erase(landmark.facts[0]); @@ -137,7 +148,7 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { void LandmarkGraph::remove_node(LandmarkNode *node) { remove_node_occurrences(node); auto it = find_if(nodes.begin(), nodes.end(), - [&node](unique_ptr &n) { + [&node](const auto &n) { return n.get() == node; }); assert(it != nodes.end()); @@ -146,20 +157,21 @@ void LandmarkGraph::remove_node(LandmarkNode *node) { void LandmarkGraph::remove_node_if( const function &remove_node_condition) { - for (auto &node : nodes) { + for (const auto &node : nodes) { if (remove_node_condition(*node)) { remove_node_occurrences(node.get()); } } nodes.erase(remove_if(nodes.begin(), nodes.end(), - 
[&remove_node_condition](const unique_ptr &node) { + [&remove_node_condition]( + const unique_ptr &node) { return remove_node_condition(*node); }), nodes.end()); } void LandmarkGraph::set_landmark_ids() { int id = 0; - for (auto &lmn : nodes) { + for (const auto &lmn : nodes) { lmn->set_id(id); ++id; } diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index 647b5318b6..fd90dccf61 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -23,7 +23,7 @@ enum class EdgeType { sense that, e.g., every greedy-necessary ordering is also natural and reasonable. (It is a sad fact of terminology that necessary is indeed a special case of greedy-necessary, i.e., every necessary ordering is - greedy-necessary, but not vice versa. + greedy-necessary, but not vice versa.) */ NECESSARY = 3, GREEDY_NECESSARY = 2, @@ -35,10 +35,14 @@ class LandmarkNode { int id; Landmark landmark; public: - LandmarkNode(Landmark &&landmark) + explicit LandmarkNode(Landmark &&landmark) : id(-1), landmark(std::move(landmark)) { } + bool operator==(const LandmarkNode &other) const { + return this == &other; + } + std::unordered_map parents; std::unordered_map children; @@ -46,7 +50,7 @@ class LandmarkNode { return id; } - // TODO: Should possibly not be changeable + // TODO: Should possibly not be changeable. void set_id(int new_id) { assert(id == -1 || new_id == id); id = new_id; @@ -63,34 +67,43 @@ class LandmarkNode { }; class LandmarkGraph { -public: - /* - TODO: get rid of this by removing get_nodes() and instead offering - functions begin() and end() with an iterator class, so users of the - LandmarkGraph can do loops like this: - for (const LandmarkNode &n : graph) {...} - */ - using Nodes = std::vector>; -private: + /* TODO: Make this a vector once landmark graphs remain + static. 
(issue993) */ + std::vector> nodes; + int num_conjunctive_landmarks; int num_disjunctive_landmarks; utils::HashMap simple_landmarks_to_nodes; utils::HashMap disjunctive_landmarks_to_nodes; - Nodes nodes; void remove_node_occurrences(LandmarkNode *node); + LandmarkNode *add_node(Landmark &&landmark); public: + // TODO: Remove once landmark graphs remain static. (issue993) + using iterator = std::vector>::iterator; + iterator begin() { + return nodes.begin(); + } + iterator end() { + return nodes.end(); + } + + using const_iterator = + std::vector>::const_iterator; + const const_iterator begin() const { + return nodes.cbegin(); + } + const const_iterator end() const { + return nodes.cend(); + } + /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ LandmarkGraph(); - // needed by both landmarkgraph-factories and non-landmarkgraph-factories - const Nodes &get_nodes() const { - return nodes; - } - // needed by both landmarkgraph-factories and non-landmarkgraph-factories + // Needed by both landmark graph factories and non-landmark-graph factories. int get_num_landmarks() const { return nodes.size(); } @@ -108,8 +121,8 @@ class LandmarkGraph { when moving landmark graph creation there. */ int get_num_edges() const; - // only needed by non-landmarkgraph-factories - LandmarkNode *get_node(int index) const; + // Only needed by non-landmarkgraph-factories. + const LandmarkNode *get_node(int index) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ LandmarkNode &get_simple_landmark(const FactPair &fact) const; @@ -118,21 +131,23 @@ class LandmarkGraph { LandmarkNode &get_disjunctive_landmark(const FactPair &fact) const; /* This is needed only by landmark graph factories and will disappear - when moving landmark graph creation there. It is not needed by - HMLandmarkFactory*/ + when moving landmark graph creation there. 
It is not needed by + HMLandmarkFactory. */ bool contains_simple_landmark(const FactPair &lm) const; - /* Only used internally. */ + // Only used internally. bool contains_disjunctive_landmark(const FactPair &lm) const; /* This is needed only by landmark graph factories and will disappear - when moving landmark graph creation there. It is not needed by - HMLandmarkFactory*/ - bool contains_overlapping_disjunctive_landmark(const std::set &lm) const; + when moving landmark graph creation there. It is not needed by + HMLandmarkFactory. */ + bool contains_overlapping_disjunctive_landmark( + const std::set &lm) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ - bool contains_identical_disjunctive_landmark(const std::set &lm) const; + bool contains_identical_disjunctive_landmark( + const std::set &lm) const; /* This is needed only by landmark graph factories and will disappear - when moving landmark graph creation there. It is not needed by - HMLandmarkFactory*/ + when moving landmark graph creation there. It is not needed by + HMLandmarkFactory. 
*/ bool contains_landmark(const FactPair &fact) const; /* This is needed only by landmark graph factories and will disappear diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 218c880ea1..444b89ce36 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -66,7 +66,7 @@ bool LandmarkHeuristic::landmark_graph_has_cycle_of_natural_orderings() { int num_landmarks = lm_graph->get_num_landmarks(); vector closed(num_landmarks, false); vector visited(num_landmarks, false); - for (auto &node : lm_graph->get_nodes()) { + for (const auto &node : *lm_graph) { if (depth_first_search_for_cycle_of_natural_orderings( *node, closed, visited)) { return true; @@ -121,10 +121,10 @@ void LandmarkHeuristic::compute_landmark_graph( } void LandmarkHeuristic::compute_landmarks_achieved_by_fact() { - for (const auto &node : lm_graph->get_nodes()) { + for (const auto &node : *lm_graph) { const int id = node->get_id(); const Landmark &lm = node->get_landmark(); - if (lm.conjunctive) { + if (lm.is_conjunctive) { /* TODO: We currently have no way to declare operators preferred based on conjunctive landmarks. 
We consider this a bug and want diff --git a/src/search/landmarks/landmark_status_manager.cc b/src/search/landmarks/landmark_status_manager.cc index c76af33a41..f4f96b2517 100644 --- a/src/search/landmarks/landmark_status_manager.cc +++ b/src/search/landmarks/landmark_status_manager.cc @@ -6,9 +6,9 @@ using namespace std; namespace landmarks { -static vector get_goal_landmarks(const LandmarkGraph &graph) { - vector goals; - for (auto &node : graph.get_nodes()) { +static vector get_goal_landmarks(const LandmarkGraph &graph) { + vector goals; + for (const auto &node : graph) { if (node->get_landmark().is_true_in_goal) { goals.push_back(node.get()); } @@ -16,12 +16,12 @@ static vector get_goal_landmarks(const LandmarkGraph &graph) { return goals; } -static vector>> get_greedy_necessary_children( +static vector>> get_greedy_necessary_children( const LandmarkGraph &graph) { - vector>> orderings; - for (auto &node : graph.get_nodes()) { - vector greedy_necessary_children; - for (auto &child : node->children) { + vector>> orderings; + for (const auto &node : graph) { + vector greedy_necessary_children; + for (const auto &child : node->children) { if (child.second == EdgeType::GREEDY_NECESSARY) { greedy_necessary_children.push_back(child.first); } @@ -33,12 +33,12 @@ static vector>> get_greedy_necessary return orderings; } -static vector>> get_reasonable_parents( +static vector>> get_reasonable_parents( const LandmarkGraph &graph) { - vector>> orderings; - for (auto &node : graph.get_nodes()) { - vector reasonable_parents; - for (auto &parent : node->parents) { + vector>> orderings; + for (const auto &node : graph) { + vector reasonable_parents; + for (const auto &parent : node->parents) { if (parent.second == EdgeType::REASONABLE) { reasonable_parents.push_back(parent.first); } @@ -57,15 +57,15 @@ LandmarkStatusManager::LandmarkStatusManager( bool progress_reasonable_orderings) : lm_graph(graph), goal_landmarks(progress_goals ? 
get_goal_landmarks(graph) - : vector{}), + : vector{}), greedy_necessary_children( progress_greedy_necessary_orderings ? get_greedy_necessary_children(graph) - : vector>>{}), + : vector>>{}), reasonable_parents( progress_reasonable_orderings ? get_reasonable_parents(graph) - : vector>>{}), + : vector>>{}), /* We initialize to true in *past_landmarks* because true is the neutral element of conjunction/set intersection. */ past_landmarks(vector(graph.get_num_landmarks(), true)), @@ -94,7 +94,7 @@ void LandmarkStatusManager::progress_initial_state(const State &initial_state) { BitsetView past = get_past_landmarks(initial_state); BitsetView future = get_future_landmarks(initial_state); - for (auto &node : lm_graph.get_nodes()) { + for (const auto &node : lm_graph) { int id = node->get_id(); const Landmark &lm = node->get_landmark(); if (lm.is_true_in_state(initial_state)) { @@ -156,7 +156,7 @@ void LandmarkStatusManager::progress_landmarks( ConstBitsetView &parent_past, ConstBitsetView &parent_future, const State &parent_ancestor_state, BitsetView &past, BitsetView &future, const State &ancestor_state) { - for (auto &node : lm_graph.get_nodes()) { + for (const auto &node : lm_graph) { int id = node->get_id(); const Landmark &lm = node->get_landmark(); if (parent_future.test(id)) { diff --git a/src/search/landmarks/landmark_status_manager.h b/src/search/landmarks/landmark_status_manager.h index 2458e827cd..d53815d072 100644 --- a/src/search/landmarks/landmark_status_manager.h +++ b/src/search/landmarks/landmark_status_manager.h @@ -11,9 +11,9 @@ class LandmarkNode; class LandmarkStatusManager { LandmarkGraph &lm_graph; - const std::vector goal_landmarks; - const std::vector>> greedy_necessary_children; - const std::vector>> reasonable_parents; + const std::vector goal_landmarks; + const std::vector>> greedy_necessary_children; + const std::vector>> reasonable_parents; PerStateBitset past_landmarks; PerStateBitset future_landmarks; diff --git 
a/src/search/landmarks/landmark_sum_heuristic.cc b/src/search/landmarks/landmark_sum_heuristic.cc index 08067e7707..5932ea0fd5 100644 --- a/src/search/landmarks/landmark_sum_heuristic.cc +++ b/src/search/landmarks/landmark_sum_heuristic.cc @@ -76,7 +76,7 @@ void LandmarkSumHeuristic::compute_landmark_costs() { int min_operator_cost = task_properties::get_min_operator_cost(task_proxy); min_first_achiever_costs.reserve(lm_graph->get_num_landmarks()); min_possible_achiever_costs.reserve(lm_graph->get_num_landmarks()); - for (auto &node : lm_graph->get_nodes()) { + for (const auto &node : *lm_graph) { if (node->get_landmark().is_derived) { min_first_achiever_costs.push_back(min_operator_cost); min_possible_achiever_costs.push_back(min_operator_cost); diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index fe5020397f..88cef452bf 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -93,9 +93,9 @@ static void dump_node( const Landmark &landmark = node.get_landmark(); for (FactPair fact : landmark.facts) { if (!first) { - if (landmark.disjunctive) { + if (landmark.is_disjunctive) { cout << " | "; - } else if (landmark.conjunctive) { + } else if (landmark.is_conjunctive) { cout << " & "; } } @@ -143,7 +143,7 @@ void dump_landmark_graph( log << "Dumping landmark graph: " << endl; cout << "digraph G {\n"; - for (const unique_ptr &node : graph.get_nodes()) { + for (const auto &node : graph) { dump_node(task_proxy, *node, log); for (const auto &child : node->children) { const LandmarkNode *child_node = child.first; From 7e476310f81147096015a111a1f186cee0b3d80f Mon Sep 17 00:00:00 2001 From: Remo Christen Date: Fri, 7 Feb 2025 17:01:23 +0100 Subject: [PATCH 003/108] Rename edge to ordering. 
--- .../cartesian_abstractions/utils_landmarks.cc | 6 +-- src/search/landmarks/landmark.cc | 4 +- src/search/landmarks/landmark_factory.cc | 14 +++--- src/search/landmarks/landmark_factory.h | 2 +- src/search/landmarks/landmark_factory_h_m.cc | 8 +-- .../landmarks/landmark_factory_merged.cc | 4 +- .../landmark_factory_reasonable_orders_hps.cc | 38 ++++++++------ .../landmarks/landmark_factory_rpg_sasp.cc | 50 ++++++++++--------- .../landmarks/landmark_factory_rpg_sasp.h | 10 ++-- .../landmarks/landmark_factory_zhu_givan.cc | 2 +- src/search/landmarks/landmark_graph.cc | 2 +- src/search/landmarks/landmark_graph.h | 8 +-- src/search/landmarks/landmark_heuristic.cc | 4 +- .../landmarks/landmark_status_manager.cc | 4 +- src/search/landmarks/util.cc | 17 ++++--- 15 files changed, 91 insertions(+), 82 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index 4f54db31ad..c8cb1e00a6 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -55,8 +55,7 @@ VarToValues get_prev_landmarks(const LandmarkNode *node) { VarToValues groups; vector open; unordered_set closed; - for (const auto &parent_and_edge : node->parents) { - const LandmarkNode *parent = parent_and_edge.first; + for (const auto &[parent, type] : node->parents) { open.push_back(parent); } while (!open.empty()) { @@ -67,8 +66,7 @@ VarToValues get_prev_landmarks(const LandmarkNode *node) { closed.insert(ancestor); FactPair ancestor_fact = get_fact(ancestor->get_landmark()); groups[ancestor_fact.var].push_back(ancestor_fact.value); - for (const auto &parent_and_edge : ancestor->parents) { - const LandmarkNode *parent = parent_and_edge.first; + for (const auto &[parent, type] : ancestor->parents) { open.push_back(parent); } } diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index ed3c3f7f2e..890d469f93 100644 --- 
a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -5,8 +5,8 @@ using namespace std; namespace landmarks { bool Landmark::is_true_in_state(const State &state) const { auto is_fact_true_in_state = [&](const FactPair &fact) { - return state[fact.var].get_value() == fact.value; - }; + return state[fact.var].get_value() == fact.value; + }; if (is_disjunctive) { return any_of(facts.cbegin(), facts.cend(), is_fact_true_in_state); } else { diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 408b79f518..71af129c4e 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -73,7 +73,7 @@ shared_ptr LandmarkFactory::compute_lm_graph( << " landmarks, of which " << lm_graph->get_num_disjunctive_landmarks() << " are disjunctive and " << lm_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; - log << lm_graph->get_num_edges() << " edges" << endl; + log << lm_graph->get_num_orderings() << " orderings" << endl; } } @@ -96,13 +96,13 @@ bool LandmarkFactory::is_landmark_precondition( return false; } -void LandmarkFactory::edge_add(LandmarkNode &from, LandmarkNode &to, - EdgeType type) { - /* Adds an edge in the landmarks graph. If an edge between the same - landmarks is already present, the stronger edge type wins. */ +void LandmarkFactory::add_ordering(LandmarkNode &from, LandmarkNode &to, + OrderingType type) { + /* Adds an ordering in the landmarks graph. If an ordering between the same + landmarks is already present, the stronger ordering type wins. */ assert(&from != &to); - // If edge already exists, remove if weaker + // If ordering already exists, remove if weaker. 
if (from.children.find(&to) != from.children.end() && from.children.find( &to)->second < type) { from.children.erase(&to); @@ -112,7 +112,7 @@ void LandmarkFactory::edge_add(LandmarkNode &from, LandmarkNode &to, assert(to.parents.find(&from) == to.parents.end()); assert(from.children.find(&to) == from.children.end()); } - // If edge does not exist (or has just been removed), insert + // If ordering does not exist (or has just been removed), insert. if (from.children.find(&to) == from.children.end()) { assert(to.parents.find(&from) == to.parents.end()); from.children.emplace(&to, type); diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index 35d329c7ea..9ab6ebb867 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -44,7 +44,7 @@ class LandmarkFactory { std::shared_ptr lm_graph; bool achievers_calculated = false; - void edge_add(LandmarkNode &from, LandmarkNode &to, EdgeType type); + void add_ordering(LandmarkNode &from, LandmarkNode &to, OrderingType type); void discard_all_orderings(); diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index d3ceb15aed..9c38275416 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -989,17 +989,19 @@ void LandmarkFactoryHM::generate_landmarks( set_minus(h_m_table_[f1].landmarks, h_m_table_[f1].necessary); } - // and add the edges + // and add the orderings. 
for (int set_index : all_lms) { for (int lm : h_m_table_[set_index].landmarks) { assert(lm_node_table_.find(lm) != lm_node_table_.end()); assert(lm_node_table_.find(set_index) != lm_node_table_.end()); - edge_add(*lm_node_table_[lm], *lm_node_table_[set_index], EdgeType::NATURAL); + add_ordering(*lm_node_table_[lm], *lm_node_table_[set_index], + OrderingType::NATURAL); } for (int gn : h_m_table_[set_index].necessary) { - edge_add(*lm_node_table_[gn], *lm_node_table_[set_index], EdgeType::GREEDY_NECESSARY); + add_ordering(*lm_node_table_[gn], *lm_node_table_[set_index], + OrderingType::GREEDY_NECESSARY); } } } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index a3d1b81b82..b718496a97 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -107,10 +107,10 @@ void LandmarkFactoryMerged::generate_landmarks( if (from) { for (const auto &to : from_orig->children) { const LandmarkNode *to_orig = to.first; - EdgeType e_type = to.second; + OrderingType type = to.second; LandmarkNode *to_node = get_matching_landmark(to_orig->get_landmark()); if (to_node) { - edge_add(*from, *to_node, e_type); + add_ordering(*from, *to_node, type); } else { if (log.is_at_least_normal()) { log << "Discarded to ordering" << endl; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 567fb94668..db3cd0546c 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -58,38 +58,44 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { - edge_add(*node2_p, *node_p, EdgeType::REASONABLE); + add_ordering(*node2_p, *node_p, OrderingType::REASONABLE); } } } else { - 
// Collect candidates for reasonable orders in "interesting nodes". - // Use hash set to filter duplicates. + /* + Collect candidates for reasonable orders in "interesting nodes". + Use hash set to filter duplicates.Use hash set to filter + duplicates. + */ unordered_set interesting_nodes(variables_size); for (const auto &child : node_p->children) { const LandmarkNode &node2_p = *child.first; - const EdgeType &edge2 = child.second; - if (edge2 >= EdgeType::GREEDY_NECESSARY) { // found node2_p: node_p ->_gn node2_p - for (const auto &p : node2_p.parents) { // find parent + const OrderingType &type2 = child.second; + if (type2 >= OrderingType::GREEDY_NECESSARY) { + // Found node2_p: node_p ->_gn node2_p. + for (const auto &p : node2_p.parents) { LandmarkNode &parent_node = *(p.first); - const EdgeType &edge = p.second; + const OrderingType &type = p.second; if (parent_node.get_landmark().is_disjunctive) continue; - if (edge >= EdgeType::NATURAL && &parent_node != node_p.get()) { - // find predecessors or parent and collect in "interesting nodes" + if (type >= OrderingType::NATURAL && + &parent_node != node_p.get()) { + /* Find predecessors or parent and collect in + "interesting nodes". */ interesting_nodes.insert(&parent_node); collect_ancestors(interesting_nodes, parent_node); } } } } - // Insert reasonable orders between those members of "interesting nodes" that interfere - // with node_p. + /* Insert reasonable orders between those members of + "interesting nodes" that interfere with node_p. 
*/ for (LandmarkNode *node2_p : interesting_nodes) { const Landmark &landmark2 = node2_p->get_landmark(); if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { - edge_add(*node2_p, *node_p, EdgeType::REASONABLE); + add_ordering(*node2_p, *node_p, OrderingType::REASONABLE); } } } @@ -215,8 +221,8 @@ void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( unordered_set closed_nodes; for (const auto &p : node.parents) { LandmarkNode &parent = *(p.first); - const EdgeType &edge = p.second; - if (edge >= EdgeType::NATURAL && closed_nodes.count(&parent) == 0) { + const OrderingType &type = p.second; + if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { open_nodes.push_back(&parent); closed_nodes.insert(&parent); result.insert(&parent); @@ -226,8 +232,8 @@ void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( LandmarkNode &node2 = *(open_nodes.front()); for (const auto &p : node2.parents) { LandmarkNode &parent = *(p.first); - const EdgeType &edge = p.second; - if (edge >= EdgeType::NATURAL && closed_nodes.count(&parent) == 0) { + const OrderingType &type = p.second; + if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { open_nodes.push_back(&parent); closed_nodes.insert(&parent); result.insert(&parent); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index bd879f7a26..44cdbdb1b5 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -145,15 +145,15 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( } void LandmarkFactoryRpgSasp::found_simple_lm_and_order( - const FactPair &a, LandmarkNode &b, EdgeType t) { - if (lm_graph->contains_simple_landmark(a)) { - LandmarkNode &simple_lm = lm_graph->get_simple_landmark(a); - edge_add(simple_lm, b, t); + const FactPair &atom, LandmarkNode &node, OrderingType type) { + if 
(lm_graph->contains_simple_landmark(atom)) { + LandmarkNode &simple_landmark = lm_graph->get_simple_landmark(atom); + add_ordering(simple_landmark, node, type); return; } - Landmark landmark({a}, false, false); - if (lm_graph->contains_disjunctive_landmark(a)) { + Landmark landmark({atom}, false, false); + if (lm_graph->contains_disjunctive_landmark(atom)) { // In issue1004, we fixed a bug in this part of the code. It now removes // the disjunctive landmark along with all its orderings from the // landmark graph and adds a new simple landmark node. Before this @@ -163,7 +163,7 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( // Simple landmarks are more informative than disjunctive ones, // remove disj. landmark and add simple one - LandmarkNode *disj_lm = &lm_graph->get_disjunctive_landmark(a); + LandmarkNode *disj_lm = &lm_graph->get_disjunctive_landmark(atom); // Remove all pointers to disj_lm from internal data structures (i.e., // the list of open landmarks and forward orders) @@ -173,7 +173,7 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( } forward_orders.erase(disj_lm); - // Retrieve incoming edges from disj_lm + // Retrieve incoming orderings from disj_lm. 
vector predecessors; predecessors.reserve(disj_lm->parents.size()); for (auto &pred : disj_lm->parents) { @@ -186,28 +186,28 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( // Add simple landmark node LandmarkNode &simple_lm = lm_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_lm); - edge_add(simple_lm, b, t); + add_ordering(simple_lm, node, type); // Add incoming orderings of replaced disj_lm as natural orderings to // simple_lm for (LandmarkNode *pred : predecessors) { - edge_add(*pred, simple_lm, EdgeType::NATURAL); + add_ordering(*pred, simple_lm, OrderingType::NATURAL); } } else { LandmarkNode &simple_lm = lm_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_lm); - edge_add(simple_lm, b, t); + add_ordering(simple_lm, node, type); } } void LandmarkFactoryRpgSasp::found_disj_lm_and_order( - const TaskProxy &task_proxy, const set &a, - LandmarkNode &b, EdgeType t) { + const TaskProxy &task_proxy, const set &atoms, + LandmarkNode &node, OrderingType type) { bool simple_lm_exists = false; // TODO: assign with FactPair::no_fact FactPair lm_prop = FactPair::no_fact; State initial_state = task_proxy.get_initial_state(); - for (const FactPair &lm : a) { + for (const FactPair &lm : atoms) { if (initial_state[lm.var].get_value() == lm.value) { return; } @@ -222,21 +222,22 @@ void LandmarkFactoryRpgSasp::found_disj_lm_and_order( if (simple_lm_exists) { // Note: don't add orders as we can't be sure that they're correct return; - } else if (lm_graph->contains_overlapping_disjunctive_landmark(a)) { - if (lm_graph->contains_identical_disjunctive_landmark(a)) { + } else if (lm_graph->contains_overlapping_disjunctive_landmark(atoms)) { + if (lm_graph->contains_identical_disjunctive_landmark(atoms)) { // LM already exists, just add order. 
- new_lm_node = &lm_graph->get_disjunctive_landmark(*a.begin()); - edge_add(*new_lm_node, b, t); + new_lm_node = &lm_graph->get_disjunctive_landmark(*atoms.begin()); + add_ordering(*new_lm_node, node, type); return; } // LM overlaps with existing disj. LM, do not add. return; } // This LM and no part of it exist, add the LM to the landmarks graph. - Landmark landmark(vector(a.begin(), a.end()), true, false); + Landmark landmark(vector(atoms.begin(), + atoms.end()), true, false); new_lm_node = &lm_graph->add_landmark(move(landmark)); open_landmarks.push_back(new_lm_node); - edge_add(*new_lm_node, b, t); + add_ordering(*new_lm_node, node, type); } void LandmarkFactoryRpgSasp::compute_shared_preconditions( @@ -439,7 +440,7 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( for (const auto &pre : shared_pre) { found_simple_lm_and_order( FactPair(pre.first, pre.second), *lm_node, - EdgeType::GREEDY_NECESSARY); + OrderingType::GREEDY_NECESSARY); } // Extract additional orders from the relaxed planning graph and DTG. approximate_lookahead_orders(task_proxy, reached, lm_node); @@ -453,7 +454,7 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( if (preconditions.size() < 5) { found_disj_lm_and_order( task_proxy, preconditions, *lm_node, - EdgeType::GREEDY_NECESSARY); + OrderingType::GREEDY_NECESSARY); } } } @@ -518,7 +519,8 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( initial state, we have found a new landmark. 
*/ if (!domain_connectivity(initial_state, lm_fact, exclude)) - found_simple_lm_and_order(FactPair(lm_fact.var, value), *lmp, EdgeType::NATURAL); + found_simple_lm_and_order(FactPair(lm_fact.var, value), *lmp, + OrderingType::NATURAL); } } @@ -607,7 +609,7 @@ void LandmarkFactoryRpgSasp::add_lm_forward_orders() { for (const auto &node2_pair : forward_orders[node.get()]) { if (lm_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = lm_graph->get_simple_landmark(node2_pair); - edge_add(*node, node2, EdgeType::NATURAL); + add_ordering(*node, node2, OrderingType::NATURAL); } } forward_orders[node.get()].clear(); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 82038f1269..b2a66445f8 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -45,12 +45,12 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { virtual void generate_relaxed_landmarks( const std::shared_ptr &task, Exploration &exploration) override; - void found_simple_lm_and_order(const FactPair &a, LandmarkNode &b, - EdgeType t); + void found_simple_lm_and_order(const FactPair &atom, LandmarkNode &node, + OrderingType type); void found_disj_lm_and_order(const TaskProxy &task_proxy, - const std::set &a, - LandmarkNode &b, - EdgeType t); + const std::set &atoms, + LandmarkNode &node, + OrderingType type); void approximate_lookahead_orders(const TaskProxy &task_proxy, const std::vector> &reached, LandmarkNode *lmp); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index f40484032f..a8585de5e1 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -91,7 +91,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks( // Add order: lm ->_{nat} lm assert(node->parents.find(lm_node) == node->parents.end()); 
assert(lm_node->children.find(node) == lm_node->children.end()); - edge_add(*node, *lm_node, EdgeType::NATURAL); + add_ordering(*node, *lm_node, OrderingType::NATURAL); } } } diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index daed466a6f..d6b0841e47 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -17,7 +17,7 @@ LandmarkGraph::LandmarkGraph() : num_conjunctive_landmarks(0), num_disjunctive_landmarks(0) { } -int LandmarkGraph::get_num_edges() const { +int LandmarkGraph::get_num_orderings() const { int total = 0; for (const auto &node : nodes) total += node->children.size(); diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index fd90dccf61..94c818ca8a 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -17,7 +17,7 @@ #include namespace landmarks { -enum class EdgeType { +enum class OrderingType { /* NOTE: The code relies on the fact that larger numbers are stronger in the sense that, e.g., every greedy-necessary ordering is also natural and @@ -43,8 +43,8 @@ class LandmarkNode { return this == &other; } - std::unordered_map parents; - std::unordered_map children; + std::unordered_map parents; + std::unordered_map children; int get_id() const { return id; @@ -119,7 +119,7 @@ class LandmarkGraph { } /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ - int get_num_edges() const; + int get_num_orderings() const; // Only needed by non-landmarkgraph-factories. 
const LandmarkNode *get_node(int index) const; diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 444b89ce36..8333093ce2 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -86,7 +86,7 @@ bool LandmarkHeuristic::depth_first_search_for_cycle_of_natural_orderings( visited[id] = true; for (auto &child : node.children) { - if (child.second >= EdgeType::NATURAL) { + if (child.second >= OrderingType::NATURAL) { if (depth_first_search_for_cycle_of_natural_orderings( *child.first, closed, visited)) { return true; @@ -115,7 +115,7 @@ void LandmarkHeuristic::compute_landmark_graph( << " are disjunctive and " << lm_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; - log << "Landmark graph contains " << lm_graph->get_num_edges() + log << "Landmark graph contains " << lm_graph->get_num_orderings() << " orderings." << endl; } } diff --git a/src/search/landmarks/landmark_status_manager.cc b/src/search/landmarks/landmark_status_manager.cc index f4f96b2517..6118ff800a 100644 --- a/src/search/landmarks/landmark_status_manager.cc +++ b/src/search/landmarks/landmark_status_manager.cc @@ -22,7 +22,7 @@ static vector>> get_gree for (const auto &node : graph) { vector greedy_necessary_children; for (const auto &child : node->children) { - if (child.second == EdgeType::GREEDY_NECESSARY) { + if (child.second == OrderingType::GREEDY_NECESSARY) { greedy_necessary_children.push_back(child.first); } } @@ -39,7 +39,7 @@ static vector>> get_reas for (const auto &node : graph) { vector reasonable_parents; for (const auto &parent : node->parents) { - if (parent.second == EdgeType::REASONABLE) { + if (parent.second == OrderingType::REASONABLE) { reasonable_parents.push_back(parent.first); } } diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 88cef452bf..3113209666 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc 
@@ -114,20 +114,21 @@ static void dump_node( } } -static void dump_edge(int from, int to, EdgeType edge, utils::LogProxy &log) { +static void dump_ordering(int from, int to, OrderingType type, + const utils::LogProxy &log) { if (log.is_at_least_debug()) { cout << " lm" << from << " -> lm" << to << " [label="; - switch (edge) { - case EdgeType::NECESSARY: + switch (type) { + case OrderingType::NECESSARY: cout << "\"nec\""; break; - case EdgeType::GREEDY_NECESSARY: + case OrderingType::GREEDY_NECESSARY: cout << "\"gn\""; break; - case EdgeType::NATURAL: + case OrderingType::NATURAL: cout << "\"n\""; break; - case EdgeType::REASONABLE: + case OrderingType::REASONABLE: cout << "\"r\", style=dashed"; break; } @@ -147,8 +148,8 @@ void dump_landmark_graph( dump_node(task_proxy, *node, log); for (const auto &child : node->children) { const LandmarkNode *child_node = child.first; - const EdgeType &edge = child.second; - dump_edge(node->get_id(), child_node->get_id(), edge, log); + const OrderingType &type = child.second; + dump_ordering(node->get_id(), child_node->get_id(), type, log); } } cout << "}" << endl; From bf8198bcb5a5d1910b5fc5571e5e1fb635ea8d4a Mon Sep 17 00:00:00 2001 From: Remo Christen Date: Fri, 7 Feb 2025 18:29:29 +0100 Subject: [PATCH 004/108] Rename functions and variables. 
--- .../subtask_generators.cc | 4 +- .../cartesian_abstractions/utils_landmarks.cc | 36 ++--- .../cartesian_abstractions/utils_landmarks.h | 6 +- src/search/landmarks/landmark.cc | 8 +- src/search/landmarks/landmark.h | 10 +- src/search/landmarks/landmark_factory.cc | 4 +- src/search/landmarks/landmark_factory_h_m.cc | 10 +- .../landmarks/landmark_factory_merged.cc | 38 +++--- .../landmark_factory_reasonable_orders_hps.cc | 28 ++-- .../landmarks/landmark_factory_relaxation.cc | 24 ++-- .../landmarks/landmark_factory_rpg_sasp.cc | 124 ++++++++++-------- .../landmarks/landmark_factory_zhu_givan.cc | 4 +- src/search/landmarks/landmark_graph.cc | 109 ++++++++------- src/search/landmarks/landmark_graph.h | 16 +-- src/search/landmarks/landmark_heuristic.cc | 22 ++-- src/search/landmarks/landmark_heuristic.h | 4 +- src/search/landmarks/util.cc | 10 +- 17 files changed, 237 insertions(+), 220 deletions(-) diff --git a/src/search/cartesian_abstractions/subtask_generators.cc b/src/search/cartesian_abstractions/subtask_generators.cc index 772dbfba3f..4b9f12a11b 100644 --- a/src/search/cartesian_abstractions/subtask_generators.cc +++ b/src/search/cartesian_abstractions/subtask_generators.cc @@ -159,8 +159,8 @@ SharedTasks LandmarkDecomposition::get_subtasks( const shared_ptr landmark_graph = get_landmark_graph(task); utils::HashMap fact_to_landmark_map = - get_fact_to_landmark_map(landmark_graph); - Facts landmark_facts = get_fact_landmarks(*landmark_graph); + get_atom_to_landmark_map(landmark_graph); + Facts landmark_facts = get_atom_landmarks(*landmark_graph); filter_and_order_facts(task, fact_order, landmark_facts, *rng, log); for (const FactPair &landmark : landmark_facts) { shared_ptr subtask = diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index c8cb1e00a6..47c21f8222 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ 
-13,10 +13,10 @@ using namespace std; using namespace landmarks; namespace cartesian_abstractions { -static FactPair get_fact(const Landmark &landmark) { +static FactPair get_atom(const Landmark &landmark) { // We assume that the given Landmarks are from an h^m landmark graph with m=1. - assert(landmark.facts.size() == 1); - return landmark.facts[0]; + assert(landmark.atoms.size() == 1); + return landmark.atoms[0]; } shared_ptr get_landmark_graph( @@ -27,28 +27,28 @@ shared_ptr get_landmark_graph( return lm_graph_factory.compute_lm_graph(task); } -vector get_fact_landmarks(const LandmarkGraph &graph) { - vector facts; - facts.reserve(graph.get_num_landmarks()); +vector get_atom_landmarks(const LandmarkGraph &graph) { + vector atoms; + atoms.reserve(graph.get_num_landmarks()); for (const auto &node : graph) { - facts.push_back(get_fact(node->get_landmark())); + atoms.push_back(get_atom(node->get_landmark())); } - sort(facts.begin(), facts.end()); - return facts; + sort(atoms.begin(), atoms.end()); + return atoms; } -utils::HashMap get_fact_to_landmark_map( +utils::HashMap get_atom_to_landmark_map( const shared_ptr &graph) { - // All landmarks are simple, i.e., each has exactly one fact. + // All landmarks are simple, i.e., each has exactly one atom. 
assert(all_of(graph->begin(), graph->end(), [](auto &node) { - return node->get_landmark().facts.size() == 1; + return node->get_landmark().atoms.size() == 1; })); - utils::HashMap fact_to_landmark_map; + utils::HashMap atom_to_landmark_map; for (const auto &node : *graph) { - const FactPair &fact = node->get_landmark().facts[0]; - fact_to_landmark_map[fact] = node.get(); + const FactPair &atom = node->get_landmark().atoms[0]; + atom_to_landmark_map[atom] = node.get(); } - return fact_to_landmark_map; + return atom_to_landmark_map; } VarToValues get_prev_landmarks(const LandmarkNode *node) { @@ -64,8 +64,8 @@ VarToValues get_prev_landmarks(const LandmarkNode *node) { if (closed.find(ancestor) != closed.end()) continue; closed.insert(ancestor); - FactPair ancestor_fact = get_fact(ancestor->get_landmark()); - groups[ancestor_fact.var].push_back(ancestor_fact.value); + FactPair ancestor_atom = get_atom(ancestor->get_landmark()); + groups[ancestor_atom.var].push_back(ancestor_atom.value); for (const auto &[parent, type] : ancestor->parents) { open.push_back(parent); } diff --git a/src/search/cartesian_abstractions/utils_landmarks.h b/src/search/cartesian_abstractions/utils_landmarks.h index 43e1c23477..dd9e808eeb 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.h +++ b/src/search/cartesian_abstractions/utils_landmarks.h @@ -20,16 +20,16 @@ using VarToValues = std::unordered_map>; extern std::shared_ptr get_landmark_graph( const std::shared_ptr &task); -extern std::vector get_fact_landmarks( +extern std::vector get_atom_landmarks( const landmarks::LandmarkGraph &graph); -extern utils::HashMap get_fact_to_landmark_map( +extern utils::HashMap get_atom_to_landmark_map( const std::shared_ptr &graph); /* Do a breadth-first search through the landmark graph ignoring duplicates. 
Start at the given node and collect for each variable the - facts that have to be made true before the given node can be true for + atoms that have to be made true before the given node can be true for the first time. */ extern VarToValues get_prev_landmarks( diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 890d469f93..8b6d0b0b04 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -4,14 +4,14 @@ using namespace std; namespace landmarks { bool Landmark::is_true_in_state(const State &state) const { - auto is_fact_true_in_state = [&](const FactPair &fact) { - return state[fact.var].get_value() == fact.value; + auto is_atom_true_in_state = [&](const FactPair &atom) { + return state[atom.var].get_value() == atom.value; }; if (is_disjunctive) { - return any_of(facts.cbegin(), facts.cend(), is_fact_true_in_state); + return any_of(atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } else { // Is conjunctive or simple. - return all_of(facts.cbegin(), facts.cend(), is_fact_true_in_state); + return all_of(atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } } } diff --git a/src/search/landmarks/landmark.h b/src/search/landmarks/landmark.h index 380ae93029..17f83d8ccf 100644 --- a/src/search/landmarks/landmark.h +++ b/src/search/landmarks/landmark.h @@ -8,15 +8,15 @@ namespace landmarks { class Landmark { public: - Landmark(std::vector _facts, bool is_disjunctive, + Landmark(std::vector _atoms, bool is_disjunctive, bool is_conjunctive, bool is_true_in_goal = false, bool is_derived = false) - : facts(move(_facts)), is_disjunctive(is_disjunctive), + : atoms(move(_atoms)), is_disjunctive(is_disjunctive), is_conjunctive(is_conjunctive), is_true_in_goal(is_true_in_goal), is_derived(is_derived) { assert(!(is_conjunctive && is_disjunctive)); - assert((is_conjunctive && facts.size() > 1) || - (is_disjunctive && facts.size() > 1) || facts.size() == 1); + assert((is_conjunctive && atoms.size() > 1) || + 
(is_disjunctive && atoms.size() > 1) || atoms.size() == 1); } bool operator==(const Landmark &other) const { @@ -27,7 +27,7 @@ class Landmark { return !(*this == other); } - std::vector facts; + std::vector atoms; const bool is_disjunctive; const bool is_conjunctive; bool is_true_in_goal; diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 71af129c4e..de890a4d4b 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -88,8 +88,8 @@ bool LandmarkFactory::is_landmark_precondition( /* Test whether the landmark is used by the operator as a precondition. A disjunctive landmarks is used if one of its disjuncts is used. */ for (FactProxy pre : op.get_preconditions()) { - for (const FactPair &lm_fact : landmark.facts) { - if (pre.get_pair() == lm_fact) + for (const FactPair &atom : landmark.atoms) { + if (pre.get_pair() == atom) return true; } } diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 9c38275416..3b3e0062ac 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -638,8 +638,8 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { Landmark &landmark = lm_node->get_landmark(); set candidates; // put all possible adders in candidates set - for (const FactPair &lm_fact : landmark.facts) { - const vector &ops = get_operators_including_eff(lm_fact); + for (const FactPair &atom : landmark.atoms) { + const vector &ops = get_operators_including_eff(atom); candidates.insert(ops.begin(), ops.end()); } @@ -647,8 +647,8 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { FluentSet post = get_operator_postcondition(variables.size(), operators[op_id]); FluentSet pre = get_operator_precondition(operators[op_id]); size_t j; - for (j = 0; j < landmark.facts.size(); ++j) { - const FactPair &lm_fact = landmark.facts[j]; + for (j = 
0; j < landmark.atoms.size(); ++j) { + const FactPair &lm_fact = landmark.atoms[j]; // action adds this element of lm as well if (find(post.begin(), post.end(), lm_fact) != post.end()) continue; @@ -676,7 +676,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { break; } } - if (j == landmark.facts.size()) { + if (j == landmark.atoms.size()) { // not inconsistent with any of the other landmark fluents landmark.possible_achievers.insert(op_id); } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index b718496a97..910738ff3b 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -20,17 +20,18 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( lm_factories(lm_factories) { } -LandmarkNode *LandmarkFactoryMerged::get_matching_landmark(const Landmark &landmark) const { +LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( + const Landmark &landmark) const { if (!landmark.is_disjunctive && !landmark.is_conjunctive) { - const FactPair &lm_fact = landmark.facts[0]; - if (lm_graph->contains_simple_landmark(lm_fact)) - return &lm_graph->get_simple_landmark(lm_fact); + const FactPair &atom = landmark.atoms[0]; + if (lm_graph->contains_simple_landmark(atom)) + return &lm_graph->get_simple_landmark_node(atom); else return nullptr; } else if (landmark.is_disjunctive) { - set lm_facts(landmark.facts.begin(), landmark.facts.end()); - if (lm_graph->contains_identical_disjunctive_landmark(lm_facts)) - return &lm_graph->get_disjunctive_landmark(landmark.facts[0]); + set atoms(landmark.atoms.begin(), landmark.atoms.end()); + if (lm_graph->contains_identical_disjunctive_landmark(atoms)) + return &lm_graph->get_disjunctive_landmark_node(landmark.atoms[0]); else return nullptr; } else if (landmark.is_conjunctive) { @@ -66,7 +67,7 @@ void LandmarkFactoryMerged::generate_landmarks( utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } else if 
(landmark.is_disjunctive) { continue; - } else if (!lm_graph->contains_landmark(landmark.facts[0])) { + } else if (!lm_graph->contains_landmark(landmark.atoms[0])) { Landmark copy(landmark); lm_graph->add_landmark(move(copy)); } @@ -80,16 +81,19 @@ void LandmarkFactoryMerged::generate_landmarks( for (const auto &lm_node : *lm_graphs[i]) { const Landmark &landmark = lm_node->get_landmark(); if (landmark.is_disjunctive) { -/* - TODO: It seems that disjunctive landmarks are only added if none of the - facts it is made of is also there as a simple landmark. This should - either be more general (add only if none of its subset is already there) - or it should be done only upon request (e.g., heuristics that consider - orders might want to keep all landmarks). -*/ + /* + TODO: It seems that disjunctive landmarks are only added if + none of the atoms it is made of is also there as a simple + landmark. This should either be more general (add only if none + of its subset is already there) or it should be done only upon + request (e.g., heuristics that consider orders might want to + keep all landmarks). 
+ */ bool exists = - any_of(landmark.facts.begin(), landmark.facts.end(), - [&](const FactPair &lm_fact) {return lm_graph->contains_landmark(lm_fact);}); + any_of(landmark.atoms.begin(), landmark.atoms.end(), + [&](const FactPair &lm_fact) { + return lm_graph->contains_landmark(lm_fact); + }); if (!exists) { Landmark copy(landmark); lm_graph->add_landmark(move(copy)); diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index db3cd0546c..6524d1a4b1 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -118,11 +118,11 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( assert(!landmark_a.is_disjunctive && !landmark_b.is_disjunctive); VariablesProxy variables = task_proxy.get_variables(); - for (const FactPair &lm_fact_b : landmark_b.facts) { - FactProxy fact_b = variables[lm_fact_b.var].get_fact(lm_fact_b.value); - for (const FactPair &lm_fact_a : landmark_a.facts) { - FactProxy fact_a = variables[lm_fact_a.var].get_fact(lm_fact_a.value); - if (lm_fact_a == lm_fact_b) { + for (const FactPair &atom_b : landmark_b.atoms) { + FactProxy fact_b = variables[atom_b.var].get_fact(atom_b.value); + for (const FactPair &atom_a : landmark_a.atoms) { + FactProxy fact_a = variables[atom_a.var].get_fact(atom_a.value); + if (atom_a == atom_b) { if (!landmark_a.is_conjunctive || !landmark_b.is_conjunctive) return false; else @@ -141,7 +141,8 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( unordered_map shared_eff; bool init = true; - const vector &op_or_axiom_ids = get_operators_including_eff(lm_fact_a); + const vector &op_or_axiom_ids = + get_operators_including_eff(atom_a); // Intersect operators that achieve a one by one for (int op_or_axiom_id : op_or_axiom_ids) { // If no shared effect among previous operators, break @@ -153,15 +154,17 @@ bool 
LandmarkFactoryReasonableOrdersHPS::interferes( // e.g. in Schedule. There, the same effect is conditioned on a disjunction // of conditions of which one will always be true. We test for a simple kind // of these trivial conditions here.) - EffectsProxy effects = get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); + EffectsProxy effects = + get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); set trivially_conditioned_effects; - bool trivial_conditioned_effects_found = effect_always_happens(variables, effects, - trivially_conditioned_effects); + bool trivial_conditioned_effects_found = + effect_always_happens(variables, effects, + trivially_conditioned_effects); unordered_map next_eff; for (EffectProxy effect : effects) { FactPair effect_fact = effect.get_fact().get_pair(); if (effect.get_conditions().empty() && - effect_fact.var != lm_fact_a.var) { + effect_fact.var != atom_a.var) { next_eff.emplace(effect_fact.var, effect_fact.value); } else if (trivial_conditioned_effects_found && trivially_conditioned_effects.find(effect_fact) @@ -184,7 +187,8 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( } // Test whether one of the shared effects is inconsistent with b for (const pair &eff : shared_eff) { - const FactProxy &effect_fact = variables[eff.first].get_fact(eff.second); + const FactProxy &effect_fact = + variables[eff.first].get_fact(eff.second); if (effect_fact != fact_a && effect_fact != fact_b && effect_fact.is_mutex(fact_b)) @@ -214,7 +218,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( unordered_set &result, LandmarkNode &node) { - /* Returns all ancestors in the landmark graph of landmark node "start" */ + // Returns all ancestors in the landmark graph of landmark node "start". 
// There could be cycles if use_reasonable == true list open_nodes; diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 77ea071293..d0d8a61ea3 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -76,11 +76,11 @@ void LandmarkFactoryRelaxation::calc_achievers( VariablesProxy variables = task_proxy.get_variables(); for (const auto &lm_node : *lm_graph) { Landmark &landmark = lm_node->get_landmark(); - for (const FactPair &lm_fact : landmark.facts) { - const vector &ops = get_operators_including_eff(lm_fact); + for (const FactPair &atom : landmark.atoms) { + const vector &ops = get_operators_including_eff(atom); landmark.possible_achievers.insert(ops.begin(), ops.end()); - if (variables[lm_fact.var].is_derived()) + if (variables[atom.var].is_derived()) landmark.is_derived = true; } @@ -100,9 +100,9 @@ void LandmarkFactoryRelaxation::calc_achievers( bool LandmarkFactoryRelaxation::relaxed_task_solvable( const TaskProxy &task_proxy, Exploration &exploration, - const Landmark &exclude) const { - vector> reached = compute_relaxed_reachability(exploration, - exclude); + const Landmark &landmark) const { + vector> reached = + compute_relaxed_reachability(exploration, landmark); for (FactProxy goal : task_proxy.get_goals()) { if (!reached[goal.get_variable().get_id()][goal.get_value()]) { @@ -113,12 +113,12 @@ bool LandmarkFactoryRelaxation::relaxed_task_solvable( } vector> LandmarkFactoryRelaxation::compute_relaxed_reachability( - Exploration &exploration, const Landmark &exclude) const { - // Extract propositions from "exclude" + Exploration &exploration, const Landmark &landmark) const { + // Extract propositions from `landmark`. 
vector excluded_op_ids; - vector excluded_props(exclude.facts.begin(), exclude.facts.end()); - - return exploration.compute_relaxed_reachability(excluded_props, - excluded_op_ids); + vector excluded_props( + landmark.atoms.begin(), landmark.atoms.end()); + return exploration.compute_relaxed_reachability( + excluded_props, excluded_op_ids); } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 44cdbdb1b5..9a2b8f55e3 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -95,10 +95,11 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); int var_id = effect_fact.get_variable().get_id(); - if (!has_precondition_on_var[var_id] && effect_fact.get_variable().get_domain_size() == 2) { - for (const FactPair &lm_fact : landmark.facts) { - if (lm_fact.var == var_id && - initial_state[var_id].get_value() != lm_fact.value) { + if (!has_precondition_on_var[var_id] && + effect_fact.get_variable().get_domain_size() == 2) { + for (const FactPair &atom : landmark.atoms) { + if (atom.var == var_id && + initial_state[var_id].get_value() != atom.value) { result.emplace(var_id, initial_state[var_id].get_value()); break; } @@ -110,8 +111,8 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( set lm_props_achievable; for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); - for (size_t j = 0; j < landmark.facts.size(); ++j) - if (landmark.facts[j] == effect_fact.get_pair()) + for (size_t j = 0; j < landmark.atoms.size(); ++j) + if (landmark.atoms[j] == effect_fact.get_pair()) lm_props_achievable.insert(j); } // Intersect effect conditions of all effects that can achieve lmp @@ -123,7 +124,7 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( if (!init && intersection.empty()) break; unordered_map current_cond; - if 
(landmark.facts[lm_prop] == effect_fact.get_pair()) { + if (landmark.atoms[lm_prop] == effect_fact.get_pair()) { EffectConditionsProxy effect_conditions = effect.get_conditions(); if (effect_conditions.empty()) { intersection.clear(); @@ -147,7 +148,8 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( void LandmarkFactoryRpgSasp::found_simple_lm_and_order( const FactPair &atom, LandmarkNode &node, OrderingType type) { if (lm_graph->contains_simple_landmark(atom)) { - LandmarkNode &simple_landmark = lm_graph->get_simple_landmark(atom); + LandmarkNode &simple_landmark = + lm_graph->get_simple_landmark_node(atom); add_ordering(simple_landmark, node, type); return; } @@ -163,7 +165,7 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( // Simple landmarks are more informative than disjunctive ones, // remove disj. landmark and add simple one - LandmarkNode *disj_lm = &lm_graph->get_disjunctive_landmark(atom); + LandmarkNode *disj_lm = &lm_graph->get_disjunctive_landmark_node(atom); // Remove all pointers to disj_lm from internal data structures (i.e., // the list of open landmarks and forward orders) @@ -225,7 +227,8 @@ void LandmarkFactoryRpgSasp::found_disj_lm_and_order( } else if (lm_graph->contains_overlapping_disjunctive_landmark(atoms)) { if (lm_graph->contains_identical_disjunctive_landmark(atoms)) { // LM already exists, just add order. - new_lm_node = &lm_graph->get_disjunctive_landmark(*atoms.begin()); + new_lm_node = + &lm_graph->get_disjunctive_landmark_node(*atoms.begin()); add_ordering(*new_lm_node, node, type); return; } @@ -234,7 +237,7 @@ void LandmarkFactoryRpgSasp::found_disj_lm_and_order( } // This LM and no part of it exist, add the LM to the landmarks graph. 
Landmark landmark(vector(atoms.begin(), - atoms.end()), true, false); + atoms.end()), true, false); new_lm_node = &lm_graph->add_landmark(move(landmark)); open_landmarks.push_back(new_lm_node); add_ordering(*new_lm_node, node, type); @@ -248,8 +251,8 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( achieve landmark bp, given the reachability in the relaxed planning graph. */ bool init = true; - for (const FactPair &lm_fact : landmark.facts) { - const vector &op_ids = get_operators_including_eff(lm_fact); + for (const FactPair &atom : landmark.atoms) { + const vector &op_ids = get_operators_including_eff(atom); for (int op_or_axiom_id : op_ids) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); @@ -270,35 +273,37 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( } } -static string get_predicate_for_fact(const VariablesProxy &variables, +static string get_predicate_for_atom(const VariablesProxy &variables, int var_no, int value) { - const string fact_name = variables[var_no].get_fact(value).get_name(); - if (fact_name == "") + const string atom_name = variables[var_no].get_fact(value).get_name(); + if (atom_name == "") return ""; int predicate_pos = 0; - if (fact_name.substr(0, 5) == "Atom ") { + if (atom_name.substr(0, 5) == "Atom ") { predicate_pos = 5; - } else if (fact_name.substr(0, 12) == "NegatedAtom ") { + } else if (atom_name.substr(0, 12) == "NegatedAtom ") { predicate_pos = 12; } - size_t paren_pos = fact_name.find('(', predicate_pos); + size_t paren_pos = atom_name.find('(', predicate_pos); if (predicate_pos == 0 || paren_pos == string::npos) { - cerr << "error: cannot extract predicate from fact: " - << fact_name << endl; + cerr << "error: cannot extract predicate from atom: " + << atom_name << endl; utils::exit_with(ExitCode::SEARCH_INPUT_ERROR); } - return string(fact_name.begin() + predicate_pos, fact_name.begin() + paren_pos); + return string(atom_name.begin() + + predicate_pos, atom_name.begin() + 
paren_pos); } -void LandmarkFactoryRpgSasp::build_disjunction_classes(const TaskProxy &task_proxy) { +void LandmarkFactoryRpgSasp::build_disjunction_classes( + const TaskProxy &task_proxy) { /* The RHW landmark generation method only allows disjunctive landmarks where all atoms stem from the same PDDL predicate. This functionality is implemented via this method. - The approach we use is to map each fact (var/value pair) to an - equivalence class (representing all facts with the same + The approach we use is to map each atom (var/value pair) to an + equivalence class (representing all atoms with the same predicate). The special class "-1" means "cannot be part of any - disjunctive landmark". This is used for facts that do not + disjunctive landmark". This is used for atoms that do not belong to any predicate. Similar methods for restricting disjunctive landmarks could be @@ -321,7 +326,7 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes(const TaskProxy &task_pro int num_values = var.get_domain_size(); disjunction_classes[var.get_id()].reserve(num_values); for (int value = 0; value < num_values; ++value) { - string predicate = get_predicate_for_fact(variables, var.get_id(), value); + string predicate = get_predicate_for_atom(variables, var.get_id(), value); int disj_class; if (predicate.empty()) { disj_class = -1; @@ -342,14 +347,15 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( /* Compute disjunctive preconditions from all operators than can potentially achieve landmark bp, given the reachability in the relaxed planning graph. - A disj. precondition is a set of facts which contains one precondition - fact from each of the operators, which we additionally restrict so that - each fact in the set stems from the same PDDL predicate. + A disj. precondition is a set of atoms which contains one precondition + atom from each of the operators, which we additionally restrict so that + each atom in the set stems from the same PDDL predicate. 
*/ vector op_or_axiom_ids; - for (const FactPair &lm_fact : landmark.facts) { - const vector &tmp_op_or_axiom_ids = get_operators_including_eff(lm_fact); + for (const FactPair &atom : landmark.atoms) { + const vector &tmp_op_or_axiom_ids = + get_operators_including_eff(atom); for (int op_or_axiom_id : tmp_op_or_axiom_ids) op_or_axiom_ids.push_back(op_or_axiom_id); } @@ -367,16 +373,16 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( for (const auto &pre : next_pre) { int disj_class = disjunction_classes[pre.first][pre.second]; if (disj_class == -1) { - // This fact may not participate in any disjunctive LMs - // since it has no associated predicate. + /* This atom may not participate in any disjunctive + landmarks since it has no associated predicate. */ continue; } // Only deal with propositions that are not shared preconditions // (those have been found already and are simple landmarks). - const FactPair pre_fact(pre.first, pre.second); - if (!lm_graph->contains_simple_landmark(pre_fact)) { - preconditions[disj_class].push_back(pre_fact); + const FactPair atom(pre.first, pre.second); + if (!lm_graph->contains_simple_landmark(atom)) { + preconditions[disj_class].push_back(atom); used_operators[disj_class].insert(i); } } @@ -474,7 +480,8 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( } void LandmarkFactoryRpgSasp::approximate_lookahead_orders( - const TaskProxy &task_proxy, const vector> &reached, LandmarkNode *lmp) { + const TaskProxy &task_proxy, const vector> &reached, + LandmarkNode *node) { /* Find all var-val pairs that can only be reached after the landmark (according to relaxed plan graph as captured in reached). @@ -483,25 +490,25 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( at the moment we don't know which of these var-val pairs will be LMs). 
*/ VariablesProxy variables = task_proxy.get_variables(); - find_forward_orders(variables, reached, lmp); + find_forward_orders(variables, reached, node); /* Use domain transition graphs to find further orders. Only possible if lmp is a simple landmark. */ - const Landmark &landmark = lmp->get_landmark(); + const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) return; - const FactPair &lm_fact = landmark.facts[0]; + const FactPair &atom = landmark.atoms[0]; /* Collect in *unreached* all values of the LM variable that cannot be reached before the LM value (in the relaxed plan graph). */ - int domain_size = variables[lm_fact.var].get_domain_size(); + int domain_size = variables[atom.var].get_domain_size(); unordered_set unreached(domain_size); for (int value = 0; value < domain_size; ++value) - if (!reached[lm_fact.var][value] && lm_fact.value != value) + if (!reached[atom.var][value] && atom.value != value) unreached.insert(value); /* The set *exclude* will contain all those values of the LM variable that @@ -510,7 +517,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( */ State initial_state = task_proxy.get_initial_state(); for (int value = 0; value < domain_size; ++value) - if (unreached.find(value) == unreached.end() && lm_fact.value != value) { + if (unreached.find(value) == unreached.end() && atom.value != value) { unordered_set exclude(domain_size); exclude = unreached; exclude.insert(value); @@ -518,15 +525,15 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( If that value is crucial for achieving the LM from the initial state, we have found a new landmark. 
*/ - if (!domain_connectivity(initial_state, lm_fact, exclude)) - found_simple_lm_and_order(FactPair(lm_fact.var, value), *lmp, + if (!domain_connectivity(initial_state, atom, exclude)) + found_simple_lm_and_order(FactPair(atom.var, value), *node, OrderingType::NATURAL); } } -bool LandmarkFactoryRpgSasp::domain_connectivity(const State &initial_state, - const FactPair &landmark, - const unordered_set &exclude) { +bool LandmarkFactoryRpgSasp::domain_connectivity( + const State &initial_state, const FactPair &landmark, + const unordered_set &exclude) { /* Tests whether in the domain transition graph of the LM variable, there is a path from the initial state value to the LM value, without passing through @@ -561,9 +568,9 @@ bool LandmarkFactoryRpgSasp::domain_connectivity(const State &initial_state, return true; } -void LandmarkFactoryRpgSasp::find_forward_orders(const VariablesProxy &variables, - const vector> &reached, - LandmarkNode *lm_node) { +void LandmarkFactoryRpgSasp::find_forward_orders( + const VariablesProxy &variables, const vector> &reached, + LandmarkNode *lm_node) { /* lm_node is ordered before any var-val pair that cannot be reached before lm_node according to relaxed planning graph (as captured in reached). 
@@ -576,14 +583,14 @@ void LandmarkFactoryRpgSasp::find_forward_orders(const VariablesProxy &variables const FactPair fact(var.get_id(), value); bool insert = true; - for (const FactPair &lm_fact : lm_node->get_landmark().facts) { - if (fact != lm_fact) { + for (const FactPair &atom : lm_node->get_landmark().atoms) { + if (fact != atom) { // Make sure there is no operator that reaches both lm and (var, value) at the same time bool intersection_empty = true; const vector &reach_fact = get_operators_including_eff(fact); const vector &reach_lm = - get_operators_including_eff(lm_fact); + get_operators_including_eff(atom); for (size_t j = 0; j < reach_fact.size() && intersection_empty; ++j) for (size_t k = 0; k < reach_lm.size() && intersection_empty; ++k) @@ -608,7 +615,8 @@ void LandmarkFactoryRpgSasp::add_lm_forward_orders() { for (const auto &node : *lm_graph) { for (const auto &node2_pair : forward_orders[node.get()]) { if (lm_graph->contains_simple_landmark(node2_pair)) { - LandmarkNode &node2 = lm_graph->get_simple_landmark(node2_pair); + LandmarkNode &node2 = + lm_graph->get_simple_landmark_node(node2_pair); add_ordering(*node, node2, OrderingType::NATURAL); } } @@ -628,7 +636,9 @@ void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() { << " disjunctive landmarks" << endl; } lm_graph->remove_node_if( - [](const LandmarkNode &node) {return node.get_landmark().is_disjunctive;}); + [](const LandmarkNode &node) { + return node.get_landmark().is_disjunctive; + }); } } diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index a8585de5e1..7cf1e95574 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -65,7 +65,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks( FactPair goal_lm = goal.get_pair(); LandmarkNode *lm_node; if (lm_graph->contains_simple_landmark(goal_lm)) { - lm_node = &lm_graph->get_simple_landmark(goal_lm); + 
lm_node = &lm_graph->get_simple_landmark_node(goal_lm); lm_node->get_landmark().is_true_in_goal = true; } else { Landmark landmark({goal_lm}, false, false, true); @@ -86,7 +86,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks( Landmark landmark({lm}, false, false); node = &lm_graph->add_landmark(move(landmark)); } else { - node = &lm_graph->get_simple_landmark(lm); + node = &lm_graph->get_simple_landmark_node(lm); } // Add order: lm ->_{nat} lm assert(node->parents.find(lm_node) == node->parents.end()); diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index d6b0841e47..0306acc5b6 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -28,62 +28,63 @@ const LandmarkNode *LandmarkGraph::get_node(int i) const { return nodes[i].get(); } -LandmarkNode &LandmarkGraph::get_simple_landmark(const FactPair &fact) const { - assert(contains_simple_landmark(fact)); - return *(simple_landmarks_to_nodes.find(fact)->second); +LandmarkNode &LandmarkGraph::get_simple_landmark_node( + const FactPair &atom) const { + assert(contains_simple_landmark(atom)); + return *(simple_landmarks_to_nodes.find(atom)->second); } // needed only by landmarkgraph-factories. -LandmarkNode &LandmarkGraph::get_disjunctive_landmark(const FactPair &fact) const { +LandmarkNode &LandmarkGraph::get_disjunctive_landmark_node( + const FactPair &atom) const { /* Note: this only works because every proposition appears in only one disjunctive landmark. 
*/ - assert(!contains_simple_landmark(fact)); - assert(contains_disjunctive_landmark(fact)); - return *(disjunctive_landmarks_to_nodes.find(fact)->second); + assert(!contains_simple_landmark(atom)); + assert(contains_disjunctive_landmark(atom)); + return *(disjunctive_landmarks_to_nodes.find(atom)->second); } -bool LandmarkGraph::contains_simple_landmark(const FactPair &lm) const { - return simple_landmarks_to_nodes.count(lm) != 0; +bool LandmarkGraph::contains_simple_landmark(const FactPair &atom) const { + return simple_landmarks_to_nodes.count(atom) != 0; } -bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &lm) const { - return disjunctive_landmarks_to_nodes.count(lm) != 0; +bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &atom) const { + return disjunctive_landmarks_to_nodes.count(atom) != 0; } bool LandmarkGraph::contains_overlapping_disjunctive_landmark( - const set &lm) const { - // Test whether ONE of the facts is present in some disjunctive landmark. - for (const FactPair &lm_fact : lm) { - if (contains_disjunctive_landmark(lm_fact)) + const set &atoms) const { + // Test whether ONE of the atoms is present in some disjunctive landmark. + for (const FactPair &atom : atoms) { + if (contains_disjunctive_landmark(atom)) return true; } return false; } bool LandmarkGraph::contains_identical_disjunctive_landmark( - const set &lm) const { - /* Test whether a disjunctive landmark exists which consists EXACTLY of - the facts in lm. 
*/ - LandmarkNode *lmn = nullptr; - for (const FactPair &lm_fact : lm) { - auto it2 = disjunctive_landmarks_to_nodes.find(lm_fact); - if (it2 == disjunctive_landmarks_to_nodes.end()) + const set &atoms) const { + LandmarkNode *node = nullptr; + for (const FactPair &atom : atoms) { + auto it = disjunctive_landmarks_to_nodes.find(atom); + if (it == disjunctive_landmarks_to_nodes.end()) return false; else { - if (lmn && lmn != it2->second) { + if (node && node != it->second) { return false; - } else if (!lmn) - lmn = it2->second; + } else if (!node) + node = it->second; } } return true; } -bool LandmarkGraph::contains_landmark(const FactPair &lm) const { - /* Note: this only checks for one fact whether it's part of a landmark, +bool LandmarkGraph::contains_landmark(const FactPair &atom) const { + /* Note: this only checks for one atom whether it's part of a landmark, hence only simple and disjunctive landmarks are checked. */ - return contains_simple_landmark(lm) || contains_disjunctive_landmark(lm); + return contains_simple_landmark(atom) || + contains_disjunctive_landmark(atom); } LandmarkNode *LandmarkGraph::add_node(Landmark &&landmark) { @@ -93,11 +94,11 @@ LandmarkNode *LandmarkGraph::add_node(Landmark &&landmark) { return nodes.back().get(); } -LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark) { - assert(landmark.is_conjunctive - || all_of(landmark.facts.begin(), landmark.facts.end(), - [&](const FactPair &lm_fact) { - return !contains_landmark(lm_fact); +LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { + assert(landmark_to_add.is_conjunctive + || all_of(landmark_to_add.atoms.begin(), landmark_to_add.atoms.end(), + [&](const FactPair &atom) { + return !contains_landmark(atom); })); /* TODO: Avoid having to fetch landmark after moving it. 
This will only be @@ -105,51 +106,49 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark) { because we wont need `disjunctive_landmarks_to_nodes` and `simple_landmarks_to_nodes` anymore. */ - LandmarkNode *new_node = add_node(move(landmark)); - const Landmark &lm = new_node->get_landmark(); + LandmarkNode *new_node = add_node(move(landmark_to_add)); + const Landmark &landmark = new_node->get_landmark(); - if (lm.is_disjunctive) { - for (const FactPair &lm_fact : lm.facts) { - disjunctive_landmarks_to_nodes.emplace(lm_fact, new_node); + if (landmark.is_disjunctive) { + for (const FactPair &atom : landmark.atoms) { + disjunctive_landmarks_to_nodes.emplace(atom, new_node); } ++num_disjunctive_landmarks; - } else if (lm.is_conjunctive) { + } else if (landmark.is_conjunctive) { ++num_conjunctive_landmarks; } else { - simple_landmarks_to_nodes.emplace(lm.facts.front(), new_node); + simple_landmarks_to_nodes.emplace(landmark.atoms.front(), new_node); } return *new_node; } void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { - for (const auto &parent : node->parents) { - LandmarkNode &parent_node = *(parent.first); - parent_node.children.erase(node); - assert(parent_node.children.find(node) == parent_node.children.end()); + for (const auto &[parent, type] : node->parents) { + parent->children.erase(node); + assert(parent->children.find(node) == parent->children.end()); } - for (const auto &child : node->children) { - LandmarkNode &child_node = *(child.first); - child_node.parents.erase(node); - assert(child_node.parents.find(node) == child_node.parents.end()); + for (const auto &[child, type] : node->children) { + child->parents.erase(node); + assert(child->parents.find(node) == child->parents.end()); } const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) { --num_disjunctive_landmarks; - for (const FactPair &lm_fact : landmark.facts) { - disjunctive_landmarks_to_nodes.erase(lm_fact); + for (const FactPair &atom : 
landmark.atoms) {
-            disjunctive_landmarks_to_nodes.erase(lm_fact);
+            disjunctive_landmarks_to_nodes.erase(atom);
         }
     } else if (landmark.is_conjunctive) {
         --num_conjunctive_landmarks;
     } else {
-        simple_landmarks_to_nodes.erase(landmark.facts[0]);
+        simple_landmarks_to_nodes.erase(landmark.atoms[0]);
     }
 }
 
 void LandmarkGraph::remove_node(LandmarkNode *node) {
     remove_node_occurrences(node);
     auto it = find_if(nodes.begin(), nodes.end(),
-                      [&node](const auto &n) {
-                          return n.get() == node;
+                      [&node](const auto &other) {
+                          return other.get() == node;
                       });
     assert(it != nodes.end());
     nodes.erase(it);
@@ -171,8 +170,8 @@ void LandmarkGraph::remove_node_if(
 
 void LandmarkGraph::set_landmark_ids() {
     int id = 0;
-    for (const auto &lmn : nodes) {
-        lmn->set_id(id);
+    for (const auto &node : nodes) {
+        node->set_id(id);
         ++id;
     }
 }
diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h
index 94c818ca8a..675fe98d92 100644
--- a/src/search/landmarks/landmark_graph.h
+++ b/src/search/landmarks/landmark_graph.h
@@ -125,30 +125,30 @@ class LandmarkGraph {
     const LandmarkNode *get_node(int index) const;
     /* This is needed only by landmark graph factories and will disappear
        when moving landmark graph creation there. */
-    LandmarkNode &get_simple_landmark(const FactPair &fact) const;
+    LandmarkNode &get_simple_landmark_node(const FactPair &atom) const;
     /* This is needed only by landmark graph factories and will disappear
-       when moving landmark graph creation there. */
-    LandmarkNode &get_disjunctive_landmark(const FactPair &fact) const;
+       when moving landmark graph creation there. */
+    LandmarkNode &get_disjunctive_landmark_node(const FactPair &atom) const;
     /* This is needed only by landmark graph factories and will disappear
        when moving landmark graph creation there. It is not needed by
        HMLandmarkFactory. */
-    bool contains_simple_landmark(const FactPair &lm) const;
+    bool contains_simple_landmark(const FactPair &atom) const;
     // Only used internally.
- bool contains_disjunctive_landmark(const FactPair &lm) const; + bool contains_disjunctive_landmark(const FactPair &atom) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ bool contains_overlapping_disjunctive_landmark( - const std::set &lm) const; + const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ bool contains_identical_disjunctive_landmark( - const std::set &lm) const; + const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ - bool contains_landmark(const FactPair &fact) const; + bool contains_landmark(const FactPair &atom) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 8333093ce2..c7e4851078 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -53,7 +53,7 @@ void LandmarkHeuristic::initialize( } if (use_preferred_operators) { - compute_landmarks_achieved_by_fact(); + compute_landmarks_achieved_by_atom(); /* Ideally, we should reuse the successor generator of the main task in cases where it's compatible. See issue564. 
*/ successor_generator = @@ -120,11 +120,11 @@ void LandmarkHeuristic::compute_landmark_graph( } } -void LandmarkHeuristic::compute_landmarks_achieved_by_fact() { +void LandmarkHeuristic::compute_landmarks_achieved_by_atom() { for (const auto &node : *lm_graph) { const int id = node->get_id(); - const Landmark &lm = node->get_landmark(); - if (lm.is_conjunctive) { + const Landmark &landmark = node->get_landmark(); + if (landmark.is_conjunctive) { /* TODO: We currently have no way to declare operators preferred based on conjunctive landmarks. We consider this a bug and want @@ -132,11 +132,11 @@ void LandmarkHeuristic::compute_landmarks_achieved_by_fact() { */ continue; } - for (const auto &fact_pair : lm.facts) { - if (landmarks_achieved_by_fact.contains(fact_pair)) { - landmarks_achieved_by_fact[fact_pair].insert(id); + for (const auto &atom : landmark.atoms) { + if (landmarks_achieved_by_atom.contains(atom)) { + landmarks_achieved_by_atom[atom].insert(id); } else { - landmarks_achieved_by_fact[fact_pair] = {id}; + landmarks_achieved_by_atom[atom] = {id}; } } } @@ -148,9 +148,9 @@ bool LandmarkHeuristic::operator_is_preferred( if (!does_fire(effect, state)) { continue; } - const FactPair fact_pair = effect.get_fact().get_pair(); - if (landmarks_achieved_by_fact.contains(fact_pair)) { - for (const int id : landmarks_achieved_by_fact[fact_pair]) { + const FactPair atom = effect.get_fact().get_pair(); + if (landmarks_achieved_by_atom.contains(atom)) { + for (const int id : landmarks_achieved_by_atom[atom]) { if (future.test(id)) { return true; } diff --git a/src/search/landmarks/landmark_heuristic.h b/src/search/landmarks/landmark_heuristic.h index f21cebc0a7..c85fb1f884 100644 --- a/src/search/landmarks/landmark_heuristic.h +++ b/src/search/landmarks/landmark_heuristic.h @@ -31,7 +31,7 @@ class LandmarkHeuristic : public Heuristic { std::shared_ptr lm_graph; const bool use_preferred_operators; // This map remains empty unless *use_preferred_operators* is true. 
- utils::HashMap> landmarks_achieved_by_fact; + utils::HashMap> landmarks_achieved_by_atom; std::unique_ptr lm_status_manager; std::unique_ptr successor_generator; @@ -46,7 +46,7 @@ class LandmarkHeuristic : public Heuristic { bool operator_is_preferred( const OperatorProxy &op, const State &state, ConstBitsetView &future); - void compute_landmarks_achieved_by_fact(); + void compute_landmarks_achieved_by_atom(); void generate_preferred_operators( const State &state, ConstBitsetView &future); virtual int compute_heuristic(const State &ancestor_state) override; diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 3113209666..5388eecf12 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -50,8 +50,8 @@ bool possibly_reaches_lm(const OperatorProxy &op, for (EffectProxy effect: op.get_effects()) { FactProxy effect_fact = effect.get_fact(); assert(!reached[effect_fact.get_variable().get_id()].empty()); - for (const FactPair &fact : landmark.facts) { - if (effect_fact.get_pair() == fact) { + for (const FactPair &atom : landmark.atoms) { + if (effect_fact.get_pair() == atom) { if (_possibly_fires(effect.get_conditions(), reached)) return true; break; @@ -91,7 +91,7 @@ static void dump_node( cout << " lm" << node.get_id() << " [label=\""; bool first = true; const Landmark &landmark = node.get_landmark(); - for (FactPair fact : landmark.facts) { + for (FactPair atom : landmark.atoms) { if (!first) { if (landmark.is_disjunctive) { cout << " | "; @@ -100,8 +100,8 @@ static void dump_node( } } first = false; - VariableProxy var = task_proxy.get_variables()[fact.var]; - cout << var.get_fact(fact.value).get_name(); + VariableProxy var = task_proxy.get_variables()[atom.var]; + cout << var.get_fact(atom.value).get_name(); } cout << "\""; if (landmark.is_true_in_state(task_proxy.get_initial_state())) { From 7297587922ca224b11da85b43f6b1a6c957520c7 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 10 Feb 2025 15:26:10 
+0100 Subject: [PATCH 005/108] Clean up landmark_graph.cc and use HashSet instead of set. --- .../cartesian_abstractions/utils_landmarks.cc | 1 + .../landmarks/landmark_factory_merged.cc | 3 +- .../landmarks/landmark_factory_rpg_sasp.cc | 13 +++-- .../landmarks/landmark_factory_rpg_sasp.h | 6 +- src/search/landmarks/landmark_graph.cc | 56 ++++++++----------- src/search/landmarks/landmark_graph.h | 12 ++-- 6 files changed, 42 insertions(+), 49 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index 47c21f8222..d962bb3a1e 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -55,6 +55,7 @@ VarToValues get_prev_landmarks(const LandmarkNode *node) { VarToValues groups; vector open; unordered_set closed; + open.reserve(node->parents.size()); for (const auto &[parent, type] : node->parents) { open.push_back(parent); } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 910738ff3b..a2a1cfc7f3 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -29,7 +29,8 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( else return nullptr; } else if (landmark.is_disjunctive) { - set atoms(landmark.atoms.begin(), landmark.atoms.end()); + const utils::HashSet atoms( + landmark.atoms.begin(), landmark.atoms.end()); if (lm_graph->contains_identical_disjunctive_landmark(atoms)) return &lm_graph->get_disjunctive_landmark_node(landmark.atoms[0]); else diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 9a2b8f55e3..590d1d7369 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -108,7 +108,7 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( } // 
Check for lmp in conditional effects - set lm_props_achievable; + unordered_set lm_props_achievable; for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); for (size_t j = 0; j < landmark.atoms.size(); ++j) @@ -203,7 +203,7 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( } void LandmarkFactoryRpgSasp::found_disj_lm_and_order( - const TaskProxy &task_proxy, const set &atoms, + const TaskProxy &task_proxy, const utils::HashSet &atoms, LandmarkNode &node, OrderingType type) { bool simple_lm_exists = false; // TODO: assign with FactPair::no_fact @@ -342,7 +342,8 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( } void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( - const TaskProxy &task_proxy, vector> &disjunctive_pre, + const TaskProxy &task_proxy, + vector> &disjunctive_pre, vector> &reached, const Landmark &landmark) { /* Compute disjunctive preconditions from all operators than can potentially @@ -362,7 +363,7 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( int num_ops = 0; unordered_map> preconditions; // maps from // pddl_proposition_indeces to props - unordered_map> used_operators; // tells for each + unordered_map> used_operators; // tells for each // proposition which operators use it for (size_t i = 0; i < op_or_axiom_ids.size(); ++i) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_ids[i]); @@ -390,7 +391,7 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( } for (const auto &pre : preconditions) { if (static_cast(used_operators[pre.first].size()) == num_ops) { - set pre_set; // the set gets rid of duplicate predicates + utils::HashSet pre_set; pre_set.insert(pre.second.begin(), pre.second.end()); if (pre_set.size() > 1) { // otherwise this LM is not actually a disjunctive LM disjunctive_pre.push_back(pre_set); @@ -452,7 +453,7 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( approximate_lookahead_orders(task_proxy, reached, lm_node); // 
Process achieving operators again to find disjunctive LMs - vector> disjunctive_pre; + vector> disjunctive_pre; compute_disjunctive_preconditions( task_proxy, disjunctive_pre, reached, landmark); for (const auto &preconditions : disjunctive_pre) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index b2a66445f8..8da6469a15 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -3,6 +3,8 @@ #include "landmark_factory_relaxation.h" +#include "../utils/hash.h" + #include #include #include @@ -38,7 +40,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { std::vector> &reached, const Landmark &landmark); void compute_disjunctive_preconditions( const TaskProxy &task_proxy, - std::vector> &disjunctive_pre, + std::vector> &disjunctive_pre, std::vector> &reached, const Landmark &landmark); @@ -48,7 +50,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { void found_simple_lm_and_order(const FactPair &atom, LandmarkNode &node, OrderingType type); void found_disj_lm_and_order(const TaskProxy &task_proxy, - const std::set &atoms, + const utils::HashSet &atoms, LandmarkNode &node, OrderingType type); void approximate_lookahead_orders(const TaskProxy &task_proxy, diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index 0306acc5b6..b4f0535aef 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -5,9 +5,8 @@ #include "../utils/memory.h" #include -#include +#include #include -#include #include using namespace std; @@ -19,8 +18,9 @@ LandmarkGraph::LandmarkGraph() int LandmarkGraph::get_num_orderings() const { int total = 0; - for (const auto &node : nodes) - total += node->children.size(); + for (const auto &node : nodes) { + total += static_cast(node->children.size()); + } return total; } @@ -34,7 +34,6 @@ LandmarkNode 
&LandmarkGraph::get_simple_landmark_node( return *(simple_landmarks_to_nodes.find(atom)->second); } -// needed only by landmarkgraph-factories. LandmarkNode &LandmarkGraph::get_disjunctive_landmark_node( const FactPair &atom) const { /* Note: this only works because every proposition appears in only one @@ -46,26 +45,23 @@ LandmarkNode &LandmarkGraph::get_disjunctive_landmark_node( bool LandmarkGraph::contains_simple_landmark(const FactPair &atom) const { - return simple_landmarks_to_nodes.count(atom) != 0; + return simple_landmarks_to_nodes.contains(atom); } bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &atom) const { - return disjunctive_landmarks_to_nodes.count(atom) != 0; + return disjunctive_landmarks_to_nodes.contains(atom); } bool LandmarkGraph::contains_overlapping_disjunctive_landmark( - const set &atoms) const { - // Test whether ONE of the atoms is present in some disjunctive landmark. - for (const FactPair &atom : atoms) { - if (contains_disjunctive_landmark(atom)) - return true; - } - return false; + const utils::HashSet &atoms) const { + return any_of(atoms.begin(), atoms.end(), [&](const FactPair &atom) { + return contains_disjunctive_landmark(atom); + }); } bool LandmarkGraph::contains_identical_disjunctive_landmark( - const set &atoms) const { - LandmarkNode *node = nullptr; + const utils::HashSet &atoms) const { + const LandmarkNode *node = nullptr; for (const FactPair &atom : atoms) { auto it = disjunctive_landmarks_to_nodes.find(atom); if (it == disjunctive_landmarks_to_nodes.end()) @@ -95,11 +91,9 @@ LandmarkNode *LandmarkGraph::add_node(Landmark &&landmark) { } LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { - assert(landmark_to_add.is_conjunctive - || all_of(landmark_to_add.atoms.begin(), landmark_to_add.atoms.end(), - [&](const FactPair &atom) { - return !contains_landmark(atom); - })); + assert(landmark_to_add.is_conjunctive || all_of( + landmark_to_add.atoms.begin(), landmark_to_add.atoms.end(), + 
[&](const FactPair &atom) {return !contains_landmark(atom);})); /* TODO: Avoid having to fetch landmark after moving it. This will only be possible after removing the assumption that landmarks don't overlap @@ -125,11 +119,11 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { for (const auto &[parent, type] : node->parents) { parent->children.erase(node); - assert(parent->children.find(node) == parent->children.end()); + assert(!parent->children.contains(node)); } for (const auto &[child, type] : node->children) { child->parents.erase(node); - assert(child->parents.find(node) == child->parents.end()); + assert(!child->parents.contains(node)); } const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) { @@ -146,10 +140,9 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { void LandmarkGraph::remove_node(LandmarkNode *node) { remove_node_occurrences(node); - auto it = find_if(nodes.begin(), nodes.end(), - [&node](const auto &other) { - return other.get() == node; - }); + const auto it = + find_if(nodes.begin(), nodes.end(), + [&node](const auto &other) {return other.get() == node;}); assert(it != nodes.end()); nodes.erase(it); } @@ -161,11 +154,10 @@ void LandmarkGraph::remove_node_if( remove_node_occurrences(node.get()); } } - nodes.erase(remove_if(nodes.begin(), nodes.end(), - [&remove_node_condition]( - const unique_ptr &node) { - return remove_node_condition(*node); - }), nodes.end()); + erase_if(nodes, + [&remove_node_condition](const unique_ptr &node) { + return remove_node_condition(*node); + }); } void LandmarkGraph::set_landmark_ids() { diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index 675fe98d92..a179c5fdee 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -9,11 +9,7 @@ #include "../utils/memory.h" #include -#include -#include -#include 
#include -#include #include namespace landmarks { @@ -105,7 +101,7 @@ class LandmarkGraph { // Needed by both landmark graph factories and non-landmark-graph factories. int get_num_landmarks() const { - return nodes.size(); + return static_cast(nodes.size()); } /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ @@ -140,11 +136,11 @@ class LandmarkGraph { when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ bool contains_overlapping_disjunctive_landmark( - const std::set &atoms) const; + const utils::HashSet &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ bool contains_identical_disjunctive_landmark( - const std::set &atoms) const; + const utils::HashSet &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ @@ -152,7 +148,7 @@ class LandmarkGraph { /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ - LandmarkNode &add_landmark(Landmark &&landmark); + LandmarkNode &add_landmark(Landmark &&landmark_to_add); /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ void remove_node(LandmarkNode *node); From 63ee8ce75b6b725f9a5822216029fddda4e98de4 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 13 Feb 2025 16:59:35 +0100 Subject: [PATCH 006/108] Incorporate review comments. 
--- src/search/landmarks/exploration.cc | 80 ++++++++++++------- src/search/landmarks/exploration.h | 6 +- .../landmark_factory_reasonable_orders_hps.cc | 9 +-- .../landmarks/landmark_factory_rpg_sasp.cc | 9 +-- src/search/landmarks/landmark_graph.cc | 2 +- 5 files changed, 63 insertions(+), 43 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 411f29dad9..8cc8e94c24 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -55,33 +55,56 @@ static int compute_number_of_unary_operators( } void Exploration::build_unary_operators() { + const OperatorsProxy operators = task_proxy.get_operators(); + const AxiomsProxy axioms = task_proxy.get_axioms(); /* - Reserve vector size unary operators. This is needed because we - cross-reference to the memory address of elements of the vector while - building it; meaning a resize would invalidate all references. + We need to reserve memory for this vector because we cross-reference to + the memory address of its elements while building it, meaning a resize + would invalidate all references. */ - OperatorsProxy operators = task_proxy.get_operators(); - AxiomsProxy axioms = task_proxy.get_axioms(); unary_operators.reserve( compute_number_of_unary_operators(operators, axioms)); - // Build unary operators for operators and axioms. 
- for (OperatorProxy op : operators) + for (const OperatorProxy &op : operators) { build_unary_operators(op); - for (OperatorProxy axiom : axioms) + } + for (const OperatorProxy &axiom : axioms) { build_unary_operators(axiom); + } } -vector Exploration::get_sorted_precondition_propositions( - const vector &preconditions, const EffectProxy &effect) { - vector extended_preconditions(preconditions); - const EffectConditionsProxy &effect_conditions = effect.get_conditions(); - for (FactProxy effect_condition : effect_conditions) { - extended_preconditions.push_back(effect_condition.get_pair()); +static vector get_sorted_effect_conditions( + const EffectProxy &effect) { + vector effect_conditions; + effect_conditions.reserve(effect.get_conditions().size()); + for (FactProxy effect_condition: effect.get_conditions()) { + effect_conditions.push_back(effect_condition.get_pair()); } + sort(effect_conditions.begin(), effect_conditions.end()); + return effect_conditions; +} - sort(extended_preconditions.begin(), extended_preconditions.end()); +static vector get_sorted_extended_preconditions( + const vector &preconditions, const EffectProxy &effect) { + /* Since this function is called with the same `preconditions` repeatedly, + we expect them to be sorted to avoid sorting them over and over again. 
*/
+    assert(is_sorted(preconditions.begin(), preconditions.end()));
+    vector<FactPair> effect_conditions = get_sorted_effect_conditions(effect);
+
+    vector<FactPair> extended_preconditions;
+    extended_preconditions.resize(
+        preconditions.size() + effect_conditions.size());
+    merge(preconditions.begin(), preconditions.end(), effect_conditions.begin(),
+          effect_conditions.end(), extended_preconditions.begin());
+    assert(is_sorted(
+        extended_preconditions.begin(), extended_preconditions.end()));
+    return extended_preconditions;
+}
 
+vector<Proposition *> Exploration::get_sorted_precondition_propositions(
+    const vector<FactPair> &preconditions, const EffectProxy &effect) {
+    vector<FactPair> extended_preconditions =
+        get_sorted_extended_preconditions(preconditions, effect);
     vector<Proposition *> precondition_propositions;
     precondition_propositions.reserve(extended_preconditions.size());
     for (const FactPair &precondition_fact : extended_preconditions) {
@@ -98,6 +121,7 @@ void Exploration::build_unary_operators(const OperatorProxy &op) {
     for (FactProxy pre : op.get_preconditions()) {
         preconditions.push_back(pre.get_pair());
     }
+    sort(preconditions.begin(), preconditions.end());
     for (EffectProxy effect : op.get_effects()) {
         vector<Proposition *> precondition_propositions =
             get_sorted_precondition_propositions(preconditions, effect);
@@ -122,10 +146,10 @@ void Exploration::reset_reachability_information() {
     }
 }
 
-void Exploration::set_state_facts_reached(const State &state) {
-    for (FactProxy fact : state) {
+void Exploration::set_state_atoms_reached(const State &state) {
+    for (FactProxy atom : state) {
         Proposition *init_prop =
-            &propositions[fact.get_variable().get_id()][fact.get_value()];
+            &propositions[atom.get_variable().get_id()][atom.get_value()];
         enqueue_if_necessary(init_prop);
     }
 }
@@ -166,7 +190,7 @@ void Exploration::initialize_operator_data(
     get_excluded_operators(excluded_op_ids);
 
     for (UnaryOperator &op : unary_operators) {
-        op.unsatisfied_preconditions = op.num_preconditions;
+        op.num_unsatisfied_preconditions = op.num_preconditions;
 
        /*
Aside from UnaryOperators derived from operators with an id in @@ -181,7 +205,7 @@ void Exploration::initialize_operator_data( op.excluded = false; // Reset from previous exploration. // Queue effects of precondition-free operators. - if (op.unsatisfied_preconditions == 0) { + if (op.num_unsatisfied_preconditions == 0) { enqueue_if_necessary(op.effect); } } @@ -201,16 +225,16 @@ void Exploration::setup_exploration_queue( reset_reachability_information(); // Set *excluded* to true for initializing operator data. - for (const FactPair &fact : excluded_props) { - propositions[fact.var][fact.value].excluded = true; + for (const FactPair &atom : excluded_props) { + propositions[atom.var][atom.value].excluded = true; } - set_state_facts_reached(state); + set_state_atoms_reached(state); initialize_operator_data(excluded_op_ids); // Reset *excluded* to false for the next exploration. - for (const FactPair &fact : excluded_props) { - propositions[fact.var][fact.value].excluded = false; + for (const FactPair &atom : excluded_props) { + propositions[atom.var][atom.value].excluded = false; } } @@ -224,9 +248,9 @@ void Exploration::relaxed_exploration() { for (UnaryOperator *unary_op : triggered_operators) { if (unary_op->excluded) continue; - --unary_op->unsatisfied_preconditions; - assert(unary_op->unsatisfied_preconditions >= 0); - if (unary_op->unsatisfied_preconditions == 0) { + --unary_op->num_unsatisfied_preconditions; + assert(unary_op->num_unsatisfied_preconditions >= 0); + if (unary_op->num_unsatisfied_preconditions == 0) { enqueue_if_necessary(unary_op->effect); } } diff --git a/src/search/landmarks/exploration.h b/src/search/landmarks/exploration.h index 4e46a2d908..8e6e783242 100644 --- a/src/search/landmarks/exploration.h +++ b/src/search/landmarks/exploration.h @@ -40,14 +40,14 @@ struct UnaryOperator { const int num_preconditions; Proposition *effect; - int unsatisfied_preconditions; + int num_unsatisfied_preconditions; bool excluded; UnaryOperator(const 
std::vector &preconditions, Proposition *eff, int op_or_axiom_id) : op_or_axiom_id(op_or_axiom_id), num_preconditions(static_cast(preconditions.size())), effect(eff), - unsatisfied_preconditions(num_preconditions), + num_unsatisfied_preconditions(num_preconditions), excluded(false) {} }; @@ -65,7 +65,7 @@ class Exploration { void build_unary_operators(const OperatorProxy &op); void reset_reachability_information(); - void set_state_facts_reached(const State &state); + void set_state_atoms_reached(const State &state); std::unordered_set get_excluded_operators( const std::vector &excluded_op_ids) const; void initialize_operator_data(const std::vector &excluded_op_ids); diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 6524d1a4b1..1e7fbbe531 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -36,7 +36,7 @@ void LandmarkFactoryReasonableOrdersHPS::generate_landmarks(const shared_ptr interesting_nodes(variables_size); for (const auto &child : node_p->children) { const LandmarkNode &node2_p = *child.first; diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 590d1d7369..757ad55b1c 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -286,8 +286,7 @@ static string get_predicate_for_atom(const VariablesProxy &variables, } size_t paren_pos = atom_name.find('(', predicate_pos); if (predicate_pos == 0 || paren_pos == string::npos) { - cerr << "error: cannot extract predicate from atom: " - << atom_name << endl; + cerr << "Cannot extract predicate from atom: " << atom_name << endl; utils::exit_with(ExitCode::SEARCH_INPUT_ERROR); } return string(atom_name.begin() + @@ -381,9 +380,9 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( // 
Only deal with propositions that are not shared preconditions // (those have been found already and are simple landmarks). - const FactPair atom(pre.first, pre.second); - if (!lm_graph->contains_simple_landmark(atom)) { - preconditions[disj_class].push_back(atom); + const FactPair precondition(pre.first, pre.second); + if (!lm_graph->contains_simple_landmark(precondition)) { + preconditions[disj_class].push_back(precondition); used_operators[disj_class].insert(i); } } diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index b4f0535aef..f1e61c42d5 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -141,7 +141,7 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { void LandmarkGraph::remove_node(LandmarkNode *node) { remove_node_occurrences(node); const auto it = - find_if(nodes.begin(), nodes.end(), + find_if(nodes.cbegin(), nodes.cend(), [&node](const auto &other) {return other.get() == node;}); assert(it != nodes.end()); nodes.erase(it); From 8a259907c1f8cf08ebced8a6b88dc20236c08d37 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Feb 2025 14:22:43 +0100 Subject: [PATCH 007/108] Rename most occurrences of `lm` in variable and function names. 
--- .../cartesian_abstractions/utils_landmarks.cc | 4 +- .../landmark_cost_partitioning_algorithms.cc | 66 +++---- .../landmark_cost_partitioning_algorithms.h | 6 +- .../landmark_cost_partitioning_heuristic.cc | 12 +- .../landmark_cost_partitioning_heuristic.h | 4 +- src/search/landmarks/landmark_factory.cc | 34 ++-- src/search/landmarks/landmark_factory.h | 6 +- src/search/landmarks/landmark_factory_h_m.cc | 60 +++---- src/search/landmarks/landmark_factory_h_m.h | 4 +- .../landmarks/landmark_factory_merged.cc | 62 +++---- .../landmarks/landmark_factory_merged.h | 2 +- .../landmark_factory_reasonable_orders_hps.cc | 15 +- .../landmark_factory_reasonable_orders_hps.h | 2 +- .../landmarks/landmark_factory_relaxation.cc | 8 +- .../landmarks/landmark_factory_rpg_exhaust.cc | 10 +- .../landmarks/landmark_factory_rpg_sasp.cc | 170 +++++++++--------- .../landmarks/landmark_factory_rpg_sasp.h | 24 +-- .../landmarks/landmark_factory_zhu_givan.cc | 73 ++++---- .../landmarks/landmark_factory_zhu_givan.h | 16 +- src/search/landmarks/landmark_heuristic.cc | 38 ++-- src/search/landmarks/landmark_heuristic.h | 8 +- .../landmarks/landmark_status_manager.cc | 55 +++--- .../landmarks/landmark_status_manager.h | 4 +- .../landmarks/landmark_sum_heuristic.cc | 18 +- src/search/landmarks/landmark_sum_heuristic.h | 2 +- src/search/landmarks/util.cc | 10 +- src/search/landmarks/util.h | 2 +- 27 files changed, 361 insertions(+), 354 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index d962bb3a1e..0dd80905dc 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -21,10 +21,10 @@ static FactPair get_atom(const Landmark &landmark) { shared_ptr get_landmark_graph( const shared_ptr &task) { - LandmarkFactoryHM lm_graph_factory( + LandmarkFactoryHM landmark_graph_factory( 1, false, true, utils::Verbosity::SILENT); - return 
lm_graph_factory.compute_lm_graph(task); + return landmark_graph_factory.compute_landmark_graph(task); } vector get_atom_landmarks(const LandmarkGraph &graph) { diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index 22ae19d967..d641bb03c6 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -19,7 +19,7 @@ using namespace std; namespace landmarks { CostPartitioningAlgorithm::CostPartitioningAlgorithm( const vector &operator_costs, const LandmarkGraph &graph) - : lm_graph(graph), operator_costs(operator_costs) { + : landmark_graph(graph), operator_costs(operator_costs) { } const unordered_set &CostPartitioningAlgorithm::get_achievers( @@ -41,22 +41,22 @@ UniformCostPartitioningAlgorithm::UniformCostPartitioningAlgorithm( } double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &lm_status_manager, + const LandmarkStatusManager &landmark_status_manager, const State &ancestor_state) { - vector achieved_lms_by_op(operator_costs.size(), 0); + vector landmarks_achieved_by_operator(operator_costs.size(), 0); vector action_landmarks(operator_costs.size(), false); ConstBitsetView past = - lm_status_manager.get_past_landmarks(ancestor_state); + landmark_status_manager.get_past_landmarks(ancestor_state); ConstBitsetView future = - lm_status_manager.get_future_landmarks(ancestor_state); + landmark_status_manager.get_future_landmarks(ancestor_state); double h = 0; /* First pass: compute which op achieves how many landmarks. Along the way, mark action landmarks and add their cost to h. 
*/ - for (const auto &node : lm_graph) { + for (const auto &node : landmark_graph) { int id = node->get_id(); if (future.test(id)) { const unordered_set &achievers = @@ -73,8 +73,8 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( } } else { for (int op_id : achievers) { - assert(utils::in_bounds(op_id, achieved_lms_by_op)); - ++achieved_lms_by_op[op_id]; + assert(utils::in_bounds(op_id, landmarks_achieved_by_operator)); + ++landmarks_achieved_by_operator[op_id]; } } } @@ -83,39 +83,39 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( /* TODO: Replace with Landmarks (to do so, we need some way to access the status of a Landmark without access to the ID, which is part of LandmarkNode). */ - vector relevant_lms; + vector relevant_landmarks; /* Second pass: remove landmarks from consideration that are covered by an action landmark; decrease the counters accordingly so that no unnecessary cost is assigned to these landmarks. */ - for (const auto &node : lm_graph) { + for (const auto &node : landmark_graph) { int id = node->get_id(); if (future.test(id)) { const unordered_set &achievers = get_achievers(node->get_landmark(), past.test(id)); - bool covered_by_action_lm = false; + bool covered_by_action_landmark = false; for (int op_id : achievers) { assert(utils::in_bounds(op_id, action_landmarks)); if (action_landmarks[op_id]) { - covered_by_action_lm = true; + covered_by_action_landmark = true; break; } } - if (covered_by_action_lm) { + if (covered_by_action_landmark) { for (int op_id : achievers) { - assert(utils::in_bounds(op_id, achieved_lms_by_op)); - --achieved_lms_by_op[op_id]; + assert(utils::in_bounds(op_id, landmarks_achieved_by_operator)); + --landmarks_achieved_by_operator[op_id]; } } else { - relevant_lms.push_back(node.get()); + relevant_landmarks.push_back(node.get()); } } } /* Third pass: count shared costs for the remaining landmarks. 
*/ - for (const LandmarkNode *node : relevant_lms) { + for (const LandmarkNode *node : relevant_landmarks) { // TODO: Iterate over Landmarks instead of LandmarkNodes int id = node->get_id(); assert(future.test(id)); @@ -123,8 +123,8 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( get_achievers(node->get_landmark(), past.test(id)); double min_cost = numeric_limits::max(); for (int op_id : achievers) { - assert(utils::in_bounds(op_id, achieved_lms_by_op)); - int num_achieved = achieved_lms_by_op[op_id]; + assert(utils::in_bounds(op_id, landmarks_achieved_by_operator)); + int num_achieved = landmarks_achieved_by_operator[op_id]; assert(num_achieved >= 1); assert(utils::in_bounds(op_id, operator_costs)); double partitioned_cost = @@ -149,7 +149,7 @@ OptimalCostPartitioningAlgorithm::OptimalCostPartitioningAlgorithm( lp::LinearProgram OptimalCostPartitioningAlgorithm::build_initial_lp() { /* The LP has one variable (column) per landmark and one inequality (row) per operator. */ - int num_cols = lm_graph.get_num_landmarks(); + int num_cols = landmark_graph.get_num_landmarks(); int num_rows = operator_costs.size(); named_vector::NamedVector lp_variables; @@ -175,28 +175,28 @@ lp::LinearProgram OptimalCostPartitioningAlgorithm::build_initial_lp() { } double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &lm_status_manager, + const LandmarkStatusManager &landmark_status_manager, const State &ancestor_state) { /* TODO: We could also do the same thing with action landmarks we do in the uniform cost partitioning case. */ ConstBitsetView past = - lm_status_manager.get_past_landmarks(ancestor_state); + landmark_status_manager.get_past_landmarks(ancestor_state); ConstBitsetView future = - lm_status_manager.get_future_landmarks(ancestor_state); + landmark_status_manager.get_future_landmarks(ancestor_state); /* Set up LP variable bounds for the landmarks. 
The range of cost(lm_1) is {0} if the landmark is already reached; otherwise it is [0, infinity]. The lower bounds are set to 0 in the constructor and never change. */ - int num_cols = lm_graph.get_num_landmarks(); - for (int lm_id = 0; lm_id < num_cols; ++lm_id) { - if (future.test(lm_id)) { - lp.get_variables()[lm_id].upper_bound = lp_solver.get_infinity(); + int num_cols = landmark_graph.get_num_landmarks(); + for (int id = 0; id < num_cols; ++id) { + if (future.test(id)) { + lp.get_variables()[id].upper_bound = lp_solver.get_infinity(); } else { - lp.get_variables()[lm_id].upper_bound = 0; + lp.get_variables()[id].upper_bound = 0; } } @@ -212,16 +212,16 @@ double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( for (lp::LPConstraint &constraint : lp_constraints) { constraint.clear(); } - for (int lm_id = 0; lm_id < num_cols; ++lm_id) { - const Landmark &landmark = lm_graph.get_node(lm_id)->get_landmark(); - if (future.test(lm_id)) { + for (int id = 0; id < num_cols; ++id) { + const Landmark &landmark = landmark_graph.get_node(id)->get_landmark(); + if (future.test(id)) { const unordered_set &achievers = - get_achievers(landmark, past.test(lm_id)); + get_achievers(landmark, past.test(id)); if (achievers.empty()) return numeric_limits::max(); for (int op_id : achievers) { assert(utils::in_bounds(op_id, lp_constraints)); - lp_constraints[op_id].insert(lm_id, 1.0); + lp_constraints[op_id].insert(id, 1.0); } } } diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.h b/src/search/landmarks/landmark_cost_partitioning_algorithms.h index a4a6cdf80d..c95b313ca4 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.h +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.h @@ -18,7 +18,7 @@ class LandmarkStatusManager; class CostPartitioningAlgorithm { protected: - const LandmarkGraph &lm_graph; + const LandmarkGraph &landmark_graph; const std::vector operator_costs; const std::unordered_set 
&get_achievers( @@ -41,7 +41,7 @@ class UniformCostPartitioningAlgorithm : public CostPartitioningAlgorithm { bool use_action_landmarks); virtual double get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &lm_status_manager, + const LandmarkStatusManager &landmark_status_manager, const State &ancestor_state) override; }; @@ -65,7 +65,7 @@ class OptimalCostPartitioningAlgorithm : public CostPartitioningAlgorithm { lp::LPSolverType solver_type); virtual double get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &lm_status_manager, + const LandmarkStatusManager &landmark_status_manager, const State &ancestor_state) override; }; } diff --git a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc index 9adf5a08bc..c1d4133cc9 100644 --- a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc +++ b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc @@ -33,13 +33,13 @@ LandmarkCostPartitioningHeuristic::LandmarkCostPartitioningHeuristic( } void LandmarkCostPartitioningHeuristic::check_unsupported_features( - const shared_ptr &lm_factory) { + const shared_ptr &landmark_factory) { if (task_properties::has_axioms(task_proxy)) { cerr << "Cost partitioning does not support axioms." << endl; utils::exit_with(utils::ExitCode::SEARCH_UNSUPPORTED); } - if (!lm_factory->supports_conditional_effects() + if (!landmark_factory->supports_conditional_effects() && task_properties::has_conditional_effects(task_proxy)) { cerr << "Conditional effects not supported by the landmark " << "generation method." 
<< endl; @@ -49,17 +49,17 @@ void LandmarkCostPartitioningHeuristic::check_unsupported_features( void LandmarkCostPartitioningHeuristic::set_cost_partitioning_algorithm( CostPartitioningMethod cost_partitioning, lp::LPSolverType lpsolver, - bool alm) { + bool use_action_landmarks) { if (cost_partitioning == CostPartitioningMethod::OPTIMAL) { cost_partitioning_algorithm = utils::make_unique_ptr( task_properties::get_operator_costs(task_proxy), - *lm_graph, lpsolver); + *landmark_graph, lpsolver); } else if (cost_partitioning == CostPartitioningMethod::UNIFORM) { cost_partitioning_algorithm = utils::make_unique_ptr( task_properties::get_operator_costs(task_proxy), - *lm_graph, alm); + *landmark_graph, use_action_landmarks); } else { ABORT("Unknown cost partitioning method"); } @@ -71,7 +71,7 @@ int LandmarkCostPartitioningHeuristic::get_heuristic_value( double h_val = cost_partitioning_algorithm->get_cost_partitioned_heuristic_value( - *lm_status_manager, ancestor_state); + *landmark_status_manager, ancestor_state); if (h_val == numeric_limits::max()) { return DEAD_END; } else { diff --git a/src/search/landmarks/landmark_cost_partitioning_heuristic.h b/src/search/landmarks/landmark_cost_partitioning_heuristic.h index be64e867cd..a3e4cd5a3f 100644 --- a/src/search/landmarks/landmark_cost_partitioning_heuristic.h +++ b/src/search/landmarks/landmark_cost_partitioning_heuristic.h @@ -16,10 +16,10 @@ class LandmarkCostPartitioningHeuristic : public LandmarkHeuristic { std::unique_ptr cost_partitioning_algorithm; void check_unsupported_features( - const std::shared_ptr &lm_factory); + const std::shared_ptr &landmark_factory); void set_cost_partitioning_algorithm( CostPartitioningMethod cost_partitioning, - lp::LPSolverType lpsolver, bool alm); + lp::LPSolverType lpsolver, bool use_action_landmarks); int get_heuristic_value(const State &ancestor_state) override; public: diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 
323f5f9393..f8848ae728 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -18,7 +18,7 @@ using namespace std; namespace landmarks { LandmarkFactory::LandmarkFactory(utils::Verbosity verbosity) - : log(utils::get_log_for_verbosity(verbosity)), lm_graph(nullptr) { + : log(utils::get_log_for_verbosity(verbosity)), landmark_graph(nullptr) { } /* @@ -42,45 +42,45 @@ LandmarkFactory::LandmarkFactory(utils::Verbosity verbosity) ensure that the TaskProxy used by the Exploration object is the same as the TaskProxy object passed to this function. */ -shared_ptr LandmarkFactory::compute_lm_graph( +shared_ptr LandmarkFactory::compute_landmark_graph( const shared_ptr &task) { - if (lm_graph) { - if (lm_graph_task != task.get()) { + if (landmark_graph) { + if (landmark_graph_task != task.get()) { cerr << "LandmarkFactory was asked to compute landmark graphs for " << "two different tasks. This is currently not supported." << endl; utils::exit_with(utils::ExitCode::SEARCH_UNSUPPORTED); } - return lm_graph; + return landmark_graph; } - lm_graph_task = task.get(); - utils::Timer lm_generation_timer; + landmark_graph_task = task.get(); + utils::Timer landmark_generation_timer; - lm_graph = make_shared(); + landmark_graph = make_shared(); TaskProxy task_proxy(*task); generate_operators_lookups(task_proxy); generate_landmarks(task); if (log.is_at_least_normal()) { - log << "Landmarks generation time: " << lm_generation_timer << endl; - if (lm_graph->get_num_landmarks() == 0) { + log << "Landmarks generation time: " << landmark_generation_timer << endl; + if (landmark_graph->get_num_landmarks() == 0) { if (log.is_warning()) { log << "Warning! No landmarks found. Task unsolvable?" 
<< endl; } } else { - log << "Discovered " << lm_graph->get_num_landmarks() - << " landmarks, of which " << lm_graph->get_num_disjunctive_landmarks() + log << "Discovered " << landmark_graph->get_num_landmarks() + << " landmarks, of which " << landmark_graph->get_num_disjunctive_landmarks() << " are disjunctive and " - << lm_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; - log << lm_graph->get_num_orderings() << " orderings" << endl; + << landmark_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; + log << landmark_graph->get_num_orderings() << " orderings" << endl; } } if (log.is_at_least_debug()) { - dump_landmark_graph(task_proxy, *lm_graph, log); + dump_landmark_graph(task_proxy, *landmark_graph, log); } - return lm_graph; + return landmark_graph; } bool LandmarkFactory::is_landmark_precondition( @@ -129,7 +129,7 @@ void LandmarkFactory::discard_all_orderings() { if (log.is_at_least_normal()) { log << "Removing all orderings." << endl; } - for (const auto &node : *lm_graph) { + for (const auto &node : *landmark_graph) { node->children.clear(); node->parents.clear(); } diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index 6165c76a31..e6d483ae48 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -30,7 +30,7 @@ class LandmarkFactory { virtual ~LandmarkFactory() = default; LandmarkFactory(const LandmarkFactory &) = delete; - std::shared_ptr compute_lm_graph(const std::shared_ptr &task); + std::shared_ptr compute_landmark_graph(const std::shared_ptr &task); virtual bool supports_conditional_effects() const = 0; @@ -41,7 +41,7 @@ class LandmarkFactory { protected: explicit LandmarkFactory(utils::Verbosity verbosity); mutable utils::LogProxy log; - std::shared_ptr lm_graph; + std::shared_ptr landmark_graph; bool achievers_calculated = false; void add_ordering(LandmarkNode &from, LandmarkNode &to, OrderingType type); @@ -56,7 
+56,7 @@ class LandmarkFactory { } private: - AbstractTask *lm_graph_task; + AbstractTask *landmark_graph_task; std::vector>> operators_eff_lookup; virtual void generate_landmarks(const std::shared_ptr &task) = 0; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 4c5135a5c7..dc6d08f194 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -605,7 +605,7 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { void LandmarkFactoryHM::postprocess(const TaskProxy &task_proxy) { if (!conjunctive_landmarks) discard_conjunctive_landmarks(); - lm_graph->set_landmark_ids(); + landmark_graph->set_landmark_ids(); if (!use_orders) discard_all_orderings(); @@ -614,12 +614,12 @@ void LandmarkFactoryHM::postprocess(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::discard_conjunctive_landmarks() { - if (lm_graph->get_num_conjunctive_landmarks() > 0) { + if (landmark_graph->get_num_conjunctive_landmarks() > 0) { if (log.is_at_least_normal()) { - log << "Discarding " << lm_graph->get_num_conjunctive_landmarks() + log << "Discarding " << landmark_graph->get_num_conjunctive_landmarks() << " conjunctive landmarks" << endl; } - lm_graph->remove_node_if( + landmark_graph->remove_node_if( [](const LandmarkNode &node) {return node.get_landmark().is_conjunctive;}); } } @@ -634,8 +634,8 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { VariablesProxy variables = task_proxy.get_variables(); // first_achievers are already filled in by compute_h_m_landmarks // here only have to do possible_achievers - for (const auto &lm_node : *lm_graph) { - Landmark &landmark = lm_node->get_landmark(); + for (const auto &node : *landmark_graph) { + Landmark &landmark = node->get_landmark(); set candidates; // put all possible adders in candidates set for (const FactPair &atom : landmark.atoms) { @@ -648,14 +648,14 @@ void 
LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { FluentSet pre = get_operator_precondition(operators[op_id]); size_t j; for (j = 0; j < landmark.atoms.size(); ++j) { - const FactPair &lm_fact = landmark.atoms[j]; - // action adds this element of lm as well - if (find(post.begin(), post.end(), lm_fact) != post.end()) + const FactPair &atom = landmark.atoms[j]; + // action adds this element of landmark as well + if (find(post.begin(), post.end(), atom) != post.end()) continue; bool is_mutex = false; for (const FactPair &fluent : post) { if (variables[fluent.var].get_fact(fluent.value).is_mutex( - variables[lm_fact.var].get_fact(lm_fact.value))) { + variables[atom.var].get_fact(atom.value))) { is_mutex = true; break; } @@ -667,7 +667,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { // we know that lm_val is not added by the operator // so if it incompatible with the pc, this can't be an achiever if (variables[fluent.var].get_fact(fluent.value).is_mutex( - variables[lm_fact.var].get_fact(lm_fact.value))) { + variables[atom.var].get_fact(atom.value))) { is_mutex = true; break; } @@ -691,7 +691,7 @@ void LandmarkFactoryHM::free_unneeded_memory() { utils::release_vector_memory(unsat_pc_count_); set_indices_.clear(); - lm_node_table_.clear(); + landmark_node_table.clear(); } // called when a fact is discovered or its landmarks change @@ -922,8 +922,8 @@ void LandmarkFactoryHM::compute_noop_landmarks( } } -void LandmarkFactoryHM::add_lm_node(int set_index, bool goal) { - if (lm_node_table_.find(set_index) == lm_node_table_.end()) { +void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { + if (landmark_node_table.find(set_index) == landmark_node_table.end()) { const HMEntry &hm_entry = h_m_table_[set_index]; vector facts(hm_entry.fluents); utils::sort_unique(facts); @@ -932,7 +932,7 @@ void LandmarkFactoryHM::add_lm_node(int set_index, bool goal) { landmark.first_achievers.insert( hm_entry.first_achievers.begin(), 
hm_entry.first_achievers.end()); - lm_node_table_[set_index] = &lm_graph->add_landmark(move(landmark)); + landmark_node_table[set_index] = &landmark_graph->add_landmark(move(landmark)); } } @@ -946,7 +946,7 @@ void LandmarkFactoryHM::generate_landmarks( FluentSet goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); get_m_sets(variables, m_, goal_subsets, goals); - list all_lms; + list all_landmarks; for (const FluentSet &goal_subset : goal_subsets) { assert(set_indices_.find(goal_subset) != set_indices_.end()); @@ -962,22 +962,22 @@ void LandmarkFactoryHM::generate_landmarks( } // set up goals landmarks for processing - union_with(all_lms, h_m_table_[set_index].landmarks); + union_with(all_landmarks, h_m_table_[set_index].landmarks); - // the goal itself is also a lm - insert_into(all_lms, set_index); + // the goal itself is also a landmark + insert_into(all_landmarks, set_index); // make a node for the goal, with in_goal = true; - add_lm_node(set_index, true); + add_landmark_node(set_index, true); } - // now make remaining lm nodes - for (int lm : all_lms) { - add_lm_node(lm, false); + // now make remaining landmark nodes + for (int landmark : all_landmarks) { + add_landmark_node(landmark, false); } if (use_orders) { // do reduction of graph // if f2 is landmark for f1, subtract landmark set of f2 from that of f1 - for (int f1 : all_lms) { + for (int f1 : all_landmarks) { list everything_to_remove; for (int f2 : h_m_table_[f1].landmarks) { union_with(everything_to_remove, h_m_table_[f2].landmarks); @@ -991,16 +991,16 @@ void LandmarkFactoryHM::generate_landmarks( // and add the orderings. 
- for (int set_index : all_lms) { - for (int lm : h_m_table_[set_index].landmarks) { - assert(lm_node_table_.find(lm) != lm_node_table_.end()); - assert(lm_node_table_.find(set_index) != lm_node_table_.end()); + for (int set_index : all_landmarks) { + for (int landmark : h_m_table_[set_index].landmarks) { + assert(landmark_node_table.find(landmark) != landmark_node_table.end()); + assert(landmark_node_table.find(set_index) != landmark_node_table.end()); - add_ordering(*lm_node_table_[lm], *lm_node_table_[set_index], + add_ordering(*landmark_node_table[landmark], *landmark_node_table[set_index], OrderingType::NATURAL); } for (int gn : h_m_table_[set_index].necessary) { - add_ordering(*lm_node_table_[gn], *lm_node_table_[set_index], + add_ordering(*landmark_node_table[gn], *landmark_node_table[set_index], OrderingType::GREEDY_NECESSARY); } } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 9323075cc0..557a4c29a2 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -85,7 +85,7 @@ class LandmarkFactoryHM : public LandmarkFactory { void calc_achievers(const TaskProxy &task_proxy); - void add_lm_node(int set_index, bool goal = false); + void add_landmark_node(int set_index, bool goal = false); void initialize(const TaskProxy &task_proxy); void free_unneeded_memory(); @@ -97,7 +97,7 @@ class LandmarkFactoryHM : public LandmarkFactory { const bool conjunctive_landmarks; const bool use_orders; - std::map lm_node_table_; + std::map landmark_node_table; std::vector h_m_table_; std::vector pm_ops_; diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 2dd2bfeb8b..0329b1c815 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -18,7 +18,7 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( const vector> &lm_factories, utils::Verbosity 
verbosity) : LandmarkFactory(verbosity), - lm_factories(lm_factories) { + landmark_factories(lm_factories) { utils::verify_list_not_empty(lm_factories, "lm_factories"); } @@ -26,15 +26,15 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { if (!landmark.is_disjunctive && !landmark.is_conjunctive) { const FactPair &atom = landmark.atoms[0]; - if (lm_graph->contains_simple_landmark(atom)) - return &lm_graph->get_simple_landmark_node(atom); + if (landmark_graph->contains_simple_landmark(atom)) + return &landmark_graph->get_simple_landmark_node(atom); else return nullptr; } else if (landmark.is_disjunctive) { const utils::HashSet atoms( landmark.atoms.begin(), landmark.atoms.end()); - if (lm_graph->contains_identical_disjunctive_landmark(atoms)) - return &lm_graph->get_disjunctive_landmark_node(landmark.atoms[0]); + if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) + return &landmark_graph->get_disjunctive_landmark_node(landmark.atoms[0]); else return nullptr; } else if (landmark.is_conjunctive) { @@ -47,32 +47,34 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( void LandmarkFactoryMerged::generate_landmarks( const shared_ptr &task) { if (log.is_at_least_normal()) { - log << "Merging " << lm_factories.size() << " landmark graphs" << endl; + log << "Merging " << landmark_factories.size() + << " landmark graphs" << endl; } - vector> lm_graphs; - lm_graphs.reserve(lm_factories.size()); + vector> landmark_graphs; + landmark_graphs.reserve(landmark_factories.size()); achievers_calculated = true; - for (const shared_ptr &lm_factory : lm_factories) { - lm_graphs.push_back(lm_factory->compute_lm_graph(task)); - achievers_calculated &= lm_factory->achievers_are_calculated(); + for (const shared_ptr &landmark_factory : landmark_factories) { + landmark_graphs.push_back( + landmark_factory->compute_landmark_graph(task)); + achievers_calculated &= landmark_factory->achievers_are_calculated(); } if 
(log.is_at_least_normal()) { log << "Adding simple landmarks" << endl; } - for (size_t i = 0; i < lm_graphs.size(); ++i) { + for (size_t i = 0; i < landmark_graphs.size(); ++i) { // TODO: loop over landmarks instead - for (const auto &lm_node : *lm_graphs[i]) { - const Landmark &landmark = lm_node->get_landmark(); + for (const auto &node : *landmark_graphs[i]) { + const Landmark &landmark = node->get_landmark(); if (landmark.is_conjunctive) { cerr << "Don't know how to handle conjunctive landmarks yet" << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } else if (landmark.is_disjunctive) { continue; - } else if (!lm_graph->contains_landmark(landmark.atoms[0])) { + } else if (!landmark_graph->contains_landmark(landmark.atoms[0])) { Landmark copy(landmark); - lm_graph->add_landmark(move(copy)); + landmark_graph->add_landmark(move(copy)); } } } @@ -80,9 +82,9 @@ void LandmarkFactoryMerged::generate_landmarks( if (log.is_at_least_normal()) { log << "Adding disjunctive landmarks" << endl; } - for (size_t i = 0; i < lm_graphs.size(); ++i) { - for (const auto &lm_node : *lm_graphs[i]) { - const Landmark &landmark = lm_node->get_landmark(); + for (const shared_ptr &graph_to_merge : landmark_graphs) { + for (const auto &node : *graph_to_merge) { + const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) { /* TODO: It seems that disjunctive landmarks are only added if @@ -94,12 +96,12 @@ void LandmarkFactoryMerged::generate_landmarks( */ bool exists = any_of(landmark.atoms.begin(), landmark.atoms.end(), - [&](const FactPair &lm_fact) { - return lm_graph->contains_landmark(lm_fact); + [&](const FactPair &atom) { + return landmark_graph->contains_landmark(atom); }); if (!exists) { Landmark copy(landmark); - lm_graph->add_landmark(move(copy)); + landmark_graph->add_landmark(move(copy)); } } } @@ -108,8 +110,8 @@ void LandmarkFactoryMerged::generate_landmarks( if (log.is_at_least_normal()) { log << "Adding orderings" << endl; } - for (size_t i = 0; i < 
lm_graphs.size(); ++i) { - for (const auto &from_orig : *lm_graphs[i]) { + for (size_t i = 0; i < landmark_graphs.size(); ++i) { + for (const auto &from_orig : *landmark_graphs[i]) { LandmarkNode *from = get_matching_landmark(from_orig->get_landmark()); if (from) { for (const auto &to : from_orig->children) { @@ -135,16 +137,14 @@ void LandmarkFactoryMerged::generate_landmarks( } void LandmarkFactoryMerged::postprocess() { - lm_graph->set_landmark_ids(); + landmark_graph->set_landmark_ids(); } bool LandmarkFactoryMerged::supports_conditional_effects() const { - for (const shared_ptr &lm_factory : lm_factories) { - if (!lm_factory->supports_conditional_effects()) { - return false; - } - } - return true; + return all_of(landmark_factories.begin(), landmark_factories.end(), + [&](const shared_ptr &landmark_factory) { + return landmark_factory->supports_conditional_effects(); + }); } class LandmarkFactoryMergedFeature diff --git a/src/search/landmarks/landmark_factory_merged.h b/src/search/landmarks/landmark_factory_merged.h index f4abb84e5e..e40df1146c 100644 --- a/src/search/landmarks/landmark_factory_merged.h +++ b/src/search/landmarks/landmark_factory_merged.h @@ -7,7 +7,7 @@ namespace landmarks { class LandmarkFactoryMerged : public LandmarkFactory { - std::vector> lm_factories; + std::vector> landmark_factories; virtual void generate_landmarks(const std::shared_ptr &task) override; void postprocess(); diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 5fc9cbb671..1ec00f1509 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -15,16 +15,17 @@ LandmarkFactoryReasonableOrdersHPS::LandmarkFactoryReasonableOrdersHPS( const shared_ptr &lm_factory, utils::Verbosity verbosity) : LandmarkFactory(verbosity), - lm_factory(lm_factory) { + landmark_factory(lm_factory) { } -void 
LandmarkFactoryReasonableOrdersHPS::generate_landmarks(const shared_ptr &task) { +void LandmarkFactoryReasonableOrdersHPS::generate_landmarks( + const shared_ptr &task) { if (log.is_at_least_normal()) { log << "Building a landmark graph with reasonable orders." << endl; } - lm_graph = lm_factory->compute_lm_graph(task); - achievers_calculated = lm_factory->achievers_are_calculated(); + landmark_graph = landmark_factory->compute_landmark_graph(task); + achievers_calculated = landmark_factory->achievers_are_calculated(); TaskProxy task_proxy(*task); if (log.is_at_least_normal()) { @@ -47,13 +48,13 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( */ State initial_state = task_proxy.get_initial_state(); int variables_size = task_proxy.get_variables().size(); - for (const auto &node_p : *lm_graph) { + for (const auto &node_p : *landmark_graph) { const Landmark &landmark = node_p->get_landmark(); if (landmark.is_disjunctive) continue; if (landmark.is_true_in_goal) { - for (const auto &node2_p : *lm_graph) { + for (const auto &node2_p : *landmark_graph) { const Landmark &landmark2 = node2_p->get_landmark(); if (landmark == landmark2 || landmark2.is_disjunctive) continue; @@ -359,7 +360,7 @@ bool LandmarkFactoryReasonableOrdersHPS::effect_always_happens( } bool LandmarkFactoryReasonableOrdersHPS::supports_conditional_effects() const { - return lm_factory->supports_conditional_effects(); + return landmark_factory->supports_conditional_effects(); } class LandmarkFactoryReasonableOrdersHPSFeature diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 7b85bba000..3effeb4acd 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -5,7 +5,7 @@ namespace landmarks { class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { - std::shared_ptr lm_factory; + 
std::shared_ptr landmark_factory; virtual void generate_landmarks(const std::shared_ptr &task) override; diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 47a4f01242..847a0a1900 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -22,7 +22,7 @@ void LandmarkFactoryRelaxation::generate_landmarks(const shared_ptrset_landmark_ids(); + landmark_graph->set_landmark_ids(); calc_achievers(task_proxy, exploration); } @@ -30,8 +30,8 @@ void LandmarkFactoryRelaxation::calc_achievers( const TaskProxy &task_proxy, Exploration &exploration) { assert(!achievers_calculated); VariablesProxy variables = task_proxy.get_variables(); - for (const auto &lm_node : *lm_graph) { - Landmark &landmark = lm_node->get_landmark(); + for (const auto &node : *landmark_graph) { + Landmark &landmark = node->get_landmark(); for (const FactPair &atom : landmark.atoms) { const vector &ops = get_operators_including_eff(atom); landmark.possible_achievers.insert(ops.begin(), ops.end()); @@ -46,7 +46,7 @@ void LandmarkFactoryRelaxation::calc_achievers( for (int op_or_axom_id : landmark.possible_achievers) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axom_id); - if (possibly_reaches_lm(op, reached, landmark)) { + if (possibly_reaches_landmark(op, reached, landmark)) { landmark.first_achievers.insert(op_or_axom_id); } } diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index e9a38c1892..6dc7d8af35 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -32,17 +32,17 @@ void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( // insert goal landmarks and mark them as goals for (FactProxy goal : task_proxy.get_goals()) { Landmark landmark({goal.get_pair()}, false, false, true); - 
lm_graph->add_landmark(move(landmark)); + landmark_graph->add_landmark(move(landmark)); } // test all other possible facts for (VariableProxy var : task_proxy.get_variables()) { for (int value = 0; value < var.get_domain_size(); ++value) { - const FactPair lm(var.get_id(), value); - if (!lm_graph->contains_simple_landmark(lm)) { - Landmark landmark({lm}, false, false); + const FactPair atom(var.get_id(), value); + if (!landmark_graph->contains_simple_landmark(atom)) { + Landmark landmark({atom}, false, false); if (!relaxed_task_solvable(task_proxy, exploration, landmark, use_unary_relaxation)) { - lm_graph->add_landmark(move(landmark)); + landmark_graph->add_landmark(move(landmark)); } } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 80137053f5..0e0e2db177 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -71,7 +71,7 @@ void LandmarkFactoryRpgSasp::add_dtg_successor(int var_id, int pre, int post) { dtg_successors[var_id][pre].insert(post); } -void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( +void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_landmark( const TaskProxy &task_proxy, const Landmark &landmark, const OperatorProxy &op, unordered_map &result) const { // Computes a subset of the actual preconditions of o for achieving lmp - takes into account @@ -106,23 +106,23 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( } // Check for lmp in conditional effects - unordered_set lm_props_achievable; + unordered_set achievable_atom_indices; for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); for (size_t j = 0; j < landmark.atoms.size(); ++j) if (landmark.atoms[j] == effect_fact.get_pair()) - lm_props_achievable.insert(j); + achievable_atom_indices.insert(j); } // Intersect effect conditions of all effects that can achieve lmp unordered_map intersection; bool 
init = true; - for (int lm_prop : lm_props_achievable) { + for (int index : achievable_atom_indices) { for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); if (!init && intersection.empty()) break; unordered_map current_cond; - if (landmark.atoms[lm_prop] == effect_fact.get_pair()) { + if (landmark.atoms[index] == effect_fact.get_pair()) { EffectConditionsProxy effect_conditions = effect.get_conditions(); if (effect_conditions.empty()) { intersection.clear(); @@ -143,17 +143,17 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( result.insert(intersection.begin(), intersection.end()); } -void LandmarkFactoryRpgSasp::found_simple_lm_and_order( +void LandmarkFactoryRpgSasp::found_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type) { - if (lm_graph->contains_simple_landmark(atom)) { + if (landmark_graph->contains_simple_landmark(atom)) { LandmarkNode &simple_landmark = - lm_graph->get_simple_landmark_node(atom); + landmark_graph->get_simple_landmark_node(atom); add_ordering(simple_landmark, node, type); return; } Landmark landmark({atom}, false, false); - if (lm_graph->contains_disjunctive_landmark(atom)) { + if (landmark_graph->contains_disjunctive_landmark(atom)) { // In issue1004, we fixed a bug in this part of the code. It now removes // the disjunctive landmark along with all its orderings from the // landmark graph and adds a new simple landmark node. Before this @@ -163,71 +163,72 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( // Simple landmarks are more informative than disjunctive ones, // remove disj. 
landmark and add simple one - LandmarkNode *disj_lm = &lm_graph->get_disjunctive_landmark_node(atom); + LandmarkNode *disjunctive_landmark_node = + &landmark_graph->get_disjunctive_landmark_node(atom); - // Remove all pointers to disj_lm from internal data structures (i.e., - // the list of open landmarks and forward orders) - auto it = find(open_landmarks.begin(), open_landmarks.end(), disj_lm); + /* Remove all pointers to `disjunctive_landmark_node` from internal data + structures (i.e., the list of open landmarks and forward orders). */ + auto it = find(open_landmarks.begin(), open_landmarks.end(), disjunctive_landmark_node); if (it != open_landmarks.end()) { open_landmarks.erase(it); } - forward_orders.erase(disj_lm); + forward_orders.erase(disjunctive_landmark_node); - // Retrieve incoming orderings from disj_lm. + // Retrieve incoming orderings from `disjunctive_landmark_node`. vector predecessors; - predecessors.reserve(disj_lm->parents.size()); - for (auto &pred : disj_lm->parents) { + predecessors.reserve(disjunctive_landmark_node->parents.size()); + for (auto &pred : disjunctive_landmark_node->parents) { predecessors.push_back(pred.first); } - // Remove disj_lm from landmark graph - lm_graph->remove_node(disj_lm); + // Remove `disjunctive_landmark_node` from landmark graph. + landmark_graph->remove_node(disjunctive_landmark_node); - // Add simple landmark node - LandmarkNode &simple_lm = lm_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&simple_lm); - add_ordering(simple_lm, node, type); + // Add simple landmark node. + LandmarkNode &simple_landmark = + landmark_graph->add_landmark(move(landmark)); + open_landmarks.push_back(&simple_landmark); + add_ordering(simple_landmark, node, type); - // Add incoming orderings of replaced disj_lm as natural orderings to - // simple_lm + /* Add incoming orderings of replaced `disjunctive_landmark_node` as + natural orderings to `simple_landmark`. 
*/ for (LandmarkNode *pred : predecessors) { - add_ordering(*pred, simple_lm, OrderingType::NATURAL); + add_ordering(*pred, simple_landmark, OrderingType::NATURAL); } } else { - LandmarkNode &simple_lm = lm_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&simple_lm); - add_ordering(simple_lm, node, type); + LandmarkNode &simple_landmark = + landmark_graph->add_landmark(move(landmark)); + open_landmarks.push_back(&simple_landmark); + add_ordering(simple_landmark, node, type); } } -void LandmarkFactoryRpgSasp::found_disj_lm_and_order( +void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( const TaskProxy &task_proxy, const utils::HashSet &atoms, LandmarkNode &node, OrderingType type) { - bool simple_lm_exists = false; + bool simple_landmark_exists = false; // TODO: assign with FactPair::no_fact - FactPair lm_prop = FactPair::no_fact; State initial_state = task_proxy.get_initial_state(); - for (const FactPair &lm : atoms) { - if (initial_state[lm.var].get_value() == lm.value) { + for (const FactPair &atom : atoms) { + if (initial_state[atom.var].get_value() == atom.value) { return; } - if (lm_graph->contains_simple_landmark(lm)) { + if (landmark_graph->contains_simple_landmark(atom)) { // Propositions in this disj. LM exist already as simple LMs. - simple_lm_exists = true; - lm_prop = lm; + simple_landmark_exists = true; break; } } - LandmarkNode *new_lm_node; - if (simple_lm_exists) { + LandmarkNode *new_landmark_node; + if (simple_landmark_exists) { // Note: don't add orders as we can't be sure that they're correct return; - } else if (lm_graph->contains_overlapping_disjunctive_landmark(atoms)) { - if (lm_graph->contains_identical_disjunctive_landmark(atoms)) { + } else if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { + if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { // LM already exists, just add order. 
- new_lm_node = - &lm_graph->get_disjunctive_landmark_node(*atoms.begin()); - add_ordering(*new_lm_node, node, type); + new_landmark_node = + &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); + add_ordering(*new_landmark_node, node, type); return; } // LM overlaps with existing disj. LM, do not add. @@ -236,9 +237,9 @@ void LandmarkFactoryRpgSasp::found_disj_lm_and_order( // This LM and no part of it exist, add the LM to the landmarks graph. Landmark landmark(vector(atoms.begin(), atoms.end()), true, false); - new_lm_node = &lm_graph->add_landmark(move(landmark)); - open_landmarks.push_back(new_lm_node); - add_ordering(*new_lm_node, node, type); + new_landmark_node = &landmark_graph->add_landmark(move(landmark)); + open_landmarks.push_back(new_landmark_node); + add_ordering(*new_landmark_node, node, type); } void LandmarkFactoryRpgSasp::compute_shared_preconditions( @@ -257,10 +258,10 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( if (!init && shared_pre.empty()) break; - if (possibly_reaches_lm(op, reached, landmark)) { + if (possibly_reaches_landmark(op, reached, landmark)) { unordered_map next_pre; - get_greedy_preconditions_for_lm(task_proxy, landmark, - op, next_pre); + get_greedy_preconditions_for_landmark(task_proxy, landmark, + op, next_pre); if (init) { init = false; shared_pre = next_pre; @@ -364,10 +365,10 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( // proposition which operators use it for (size_t i = 0; i < op_or_axiom_ids.size(); ++i) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_ids[i]); - if (possibly_reaches_lm(op, reached, landmark)) { + if (possibly_reaches_landmark(op, reached, landmark)) { ++num_ops; unordered_map next_pre; - get_greedy_preconditions_for_lm(task_proxy, landmark, op, next_pre); + get_greedy_preconditions_for_landmark(task_proxy, landmark, op, next_pre); for (const auto &pre : next_pre) { int disj_class = disjunction_classes[pre.first][pre.second]; if 
(disj_class == -1) { @@ -379,7 +380,7 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( // Only deal with propositions that are not shared preconditions // (those have been found already and are simple landmarks). const FactPair precondition(pre.first, pre.second); - if (!lm_graph->contains_simple_landmark(precondition)) { + if (!landmark_graph->contains_simple_landmark(precondition)) { preconditions[disj_class].push_back(precondition); used_operators[disj_class].insert(i); } @@ -408,16 +409,16 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( for (FactProxy goal : task_proxy.get_goals()) { Landmark landmark({goal.get_pair()}, false, false, true); - LandmarkNode &lm_node = lm_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&lm_node); + LandmarkNode &node = landmark_graph->add_landmark(move(landmark)); + open_landmarks.push_back(&node); } State initial_state = task_proxy.get_initial_state(); while (!open_landmarks.empty()) { - LandmarkNode *lm_node = open_landmarks.front(); - Landmark &landmark = lm_node->get_landmark(); + LandmarkNode *node = open_landmarks.front(); + Landmark &landmark = node->get_landmark(); open_landmarks.pop_front(); - assert(forward_orders[lm_node].empty()); + assert(forward_orders[node].empty()); if (!landmark.is_true_in_state(initial_state)) { /* @@ -442,12 +443,12 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( necessary predecessors of *landmark*. */ for (const auto &pre : shared_pre) { - found_simple_lm_and_order( - FactPair(pre.first, pre.second), *lm_node, + found_simple_landmark_and_ordering( + FactPair(pre.first, pre.second), *node, OrderingType::GREEDY_NECESSARY); } // Extract additional orders from the relaxed planning graph and DTG. 
- approximate_lookahead_orders(task_proxy, reached, lm_node); + approximate_lookahead_orders(task_proxy, reached, node); // Process achieving operators again to find disjunctive LMs vector> disjunctive_pre; @@ -456,13 +457,13 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( for (const auto &preconditions : disjunctive_pre) // We don't want disjunctive LMs to get too big. if (preconditions.size() < 5) { - found_disj_lm_and_order( - task_proxy, preconditions, *lm_node, + found_disjunctive_landmark_and_ordering( + task_proxy, preconditions, *node, OrderingType::GREEDY_NECESSARY); } } } - add_lm_forward_orders(); + add_landmark_forward_orderings(); if (!disjunctive_landmarks) { discard_disjunctive_landmarks(); @@ -488,7 +489,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( /* Use domain transition graphs to find further orders. Only possible - if lmp is a simple landmark. + if landmark is a simple. */ const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) @@ -520,8 +521,8 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( initial state, we have found a new landmark. */ if (!domain_connectivity(initial_state, atom, exclude)) - found_simple_lm_and_order(FactPair(atom.var, value), *node, - OrderingType::NATURAL); + found_simple_landmark_and_ordering(FactPair(atom.var, value), *node, + OrderingType::NATURAL); } } @@ -564,11 +565,11 @@ bool LandmarkFactoryRpgSasp::domain_connectivity( void LandmarkFactoryRpgSasp::find_forward_orders( const VariablesProxy &variables, const vector> &reached, - LandmarkNode *lm_node) { + LandmarkNode *node) { /* - lm_node is ordered before any var-val pair that cannot be reached before - lm_node according to relaxed planning graph (as captured in reached). - These orders are saved in the node member variable "forward_orders". + `node` is ordered before any var-val pair that cannot be reached before + `node` according to relaxed planning graph (as captured in reached). 
+ These orders are saved in the node member variable `forward_orders`. */ for (VariableProxy var : variables) for (int value = 0; value < var.get_domain_size(); ++value) { @@ -577,18 +578,19 @@ void LandmarkFactoryRpgSasp::find_forward_orders( const FactPair fact(var.get_id(), value); bool insert = true; - for (const FactPair &atom : lm_node->get_landmark().atoms) { + for (const FactPair &atom : node->get_landmark().atoms) { if (fact != atom) { - // Make sure there is no operator that reaches both lm and (var, value) at the same time + /* Make sure there is no operator that reaches both `atom` + and (var, value) at the same time. */ bool intersection_empty = true; const vector &reach_fact = get_operators_including_eff(fact); - const vector &reach_lm = + const vector &achievers = get_operators_including_eff(atom); for (size_t j = 0; j < reach_fact.size() && intersection_empty; ++j) - for (size_t k = 0; k < reach_lm.size() + for (size_t k = 0; k < achievers.size() && intersection_empty; ++k) - if (reach_fact[j] == reach_lm[k]) + if (reach_fact[j] == achievers[k]) intersection_empty = false; if (!intersection_empty) { @@ -601,16 +603,16 @@ void LandmarkFactoryRpgSasp::find_forward_orders( } } if (insert) - forward_orders[lm_node].insert(fact); + forward_orders[node].insert(fact); } } -void LandmarkFactoryRpgSasp::add_lm_forward_orders() { - for (const auto &node : *lm_graph) { +void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { + for (const auto &node : *landmark_graph) { for (const auto &node2_pair : forward_orders[node.get()]) { - if (lm_graph->contains_simple_landmark(node2_pair)) { + if (landmark_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = - lm_graph->get_simple_landmark_node(node2_pair); + landmark_graph->get_simple_landmark_node(node2_pair); add_ordering(*node, node2, OrderingType::NATURAL); } } @@ -624,12 +626,12 @@ void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() { even if we don't want to use disjunctive 
landmarks during search. So we allow removing disjunctive landmarks after landmark generation. */ - if (lm_graph->get_num_disjunctive_landmarks() > 0) { + if (landmark_graph->get_num_disjunctive_landmarks() > 0) { if (log.is_at_least_normal()) { - log << "Discarding " << lm_graph->get_num_disjunctive_landmarks() + log << "Discarding " << landmark_graph->get_num_disjunctive_landmarks() << " disjunctive landmarks" << endl; } - lm_graph->remove_node_if( + landmark_graph->remove_node_if( [](const LandmarkNode &node) { return node.get_landmark().is_disjunctive; }); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index d968feb289..7f14068b37 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -26,10 +26,10 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { void add_dtg_successor(int var_id, int pre, int post); void find_forward_orders(const VariablesProxy &variables, const std::vector> &reached, - LandmarkNode *lm_node); - void add_lm_forward_orders(); + LandmarkNode *node); + void add_landmark_forward_orderings(); - void get_greedy_preconditions_for_lm( + void get_greedy_preconditions_for_landmark( const TaskProxy &task_proxy, const Landmark &landmark, const OperatorProxy &op, std::unordered_map &result) const; @@ -46,15 +46,15 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { virtual void generate_relaxed_landmarks( const std::shared_ptr &task, Exploration &exploration) override; - void found_simple_lm_and_order(const FactPair &atom, LandmarkNode &node, - OrderingType type); - void found_disj_lm_and_order(const TaskProxy &task_proxy, - const utils::HashSet &atoms, - LandmarkNode &node, - OrderingType type); - void approximate_lookahead_orders(const TaskProxy &task_proxy, - const std::vector> &reached, - LandmarkNode *lmp); + void found_simple_landmark_and_ordering(const FactPair &atom, LandmarkNode 
&node, + OrderingType type); + void found_disjunctive_landmark_and_ordering(const TaskProxy &task_proxy, + const utils::HashSet &atoms, + LandmarkNode &node, + OrderingType type); + void approximate_lookahead_orders( + const TaskProxy &task_proxy, + const std::vector> &reached, LandmarkNode *node); bool domain_connectivity(const State &initial_state, const FactPair &landmark, const std::unordered_set &exclude); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index c8430bf787..b9abf1981f 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -54,7 +54,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks( log << "Problem not solvable, even if relaxed." << endl; } Landmark landmark({goal.get_pair()}, false, false, true); - lm_graph->add_landmark(move(landmark)); + landmark_graph->add_landmark(move(landmark)); return; } } @@ -62,32 +62,35 @@ void LandmarkFactoryZhuGivan::extract_landmarks( State initial_state = task_proxy.get_initial_state(); // insert goal landmarks and mark them as goals for (FactProxy goal : task_proxy.get_goals()) { - FactPair goal_lm = goal.get_pair(); + FactPair goal_landmark = goal.get_pair(); + // TODO: rename `lm_node` (avoid lm). 
LandmarkNode *lm_node; - if (lm_graph->contains_simple_landmark(goal_lm)) { - lm_node = &lm_graph->get_simple_landmark_node(goal_lm); + if (landmark_graph->contains_simple_landmark(goal_landmark)) { + lm_node = &landmark_graph->get_simple_landmark_node(goal_landmark); lm_node->get_landmark().is_true_in_goal = true; } else { - Landmark landmark({goal_lm}, false, false, true); - lm_node = &lm_graph->add_landmark(move(landmark)); + Landmark landmark({goal_landmark}, false, false, true); + lm_node = &landmark_graph->add_landmark(move(landmark)); } // extract landmarks from goal labels const plan_graph_node &goal_node = - last_prop_layer[goal_lm.var][goal_lm.value]; + last_prop_layer[goal_landmark.var][goal_landmark.value]; assert(goal_node.reached()); + // TODO: get rid of `lm` (avoid lm). for (const FactPair &lm : goal_node.labels) { - if (lm == goal_lm) // ignore label on itself + if (lm == goal_landmark) // ignore label on itself continue; LandmarkNode *node; // Add new landmarks - if (!lm_graph->contains_simple_landmark(lm)) { + if (!landmark_graph->contains_simple_landmark(lm)) { Landmark landmark({lm}, false, false); - node = &lm_graph->add_landmark(move(landmark)); + node = &landmark_graph->add_landmark(move(landmark)); } else { - node = &lm_graph->get_simple_landmark_node(lm); + node = &landmark_graph->get_simple_landmark_node(lm); } + // TODO: Update comment below after renaming. 
// Add order: lm ->_{nat} lm assert(node->parents.find(lm_node) == node->parents.end()); assert(lm_node->children.find(node) == lm_node->children.end()); @@ -131,14 +134,14 @@ LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::build_relaxed for (int op_or_axiom_id : triggered) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); if (operator_applicable(op, current_prop_layer)) { - lm_set changed = apply_operator_and_propagate_labels( + LandmarkSet changed = apply_operator_and_propagate_labels( op, current_prop_layer, next_prop_layer); if (!changed.empty()) { changes = true; - for (const FactPair &lm : changed) + for (const FactPair &landmark : changed) next_triggered.insert( - triggers[lm.var][lm.value].begin(), - triggers[lm.var][lm.value].end()); + triggers[landmark.var][landmark.value].begin(), + triggers[landmark.var][landmark.value].end()); } } } @@ -166,32 +169,32 @@ bool LandmarkFactoryZhuGivan::operator_cond_effect_fires( return true; } -static lm_set _union(const lm_set &a, const lm_set &b) { +static LandmarkSet _union(const LandmarkSet &a, const LandmarkSet &b) { if (a.size() < b.size()) return _union(b, a); - lm_set result = a; + LandmarkSet result = a; - for (lm_set::const_iterator it = b.begin(); it != b.end(); ++it) + for (LandmarkSet::const_iterator it = b.begin(); it != b.end(); ++it) result.insert(*it); return result; } -static lm_set _intersection(const lm_set &a, const lm_set &b) { +static LandmarkSet _intersection(const LandmarkSet &a, const LandmarkSet &b) { if (a.size() > b.size()) return _intersection(b, a); - lm_set result; + LandmarkSet result; - for (lm_set::const_iterator it = a.begin(); it != a.end(); ++it) + for (LandmarkSet::const_iterator it = a.begin(); it != a.end(); ++it) if (b.find(*it) != b.end()) result.insert(*it); return result; } -lm_set LandmarkFactoryZhuGivan::union_of_precondition_labels(const OperatorProxy &op, - const PropositionLayer ¤t) const { - lm_set result; +LandmarkSet 
LandmarkFactoryZhuGivan::union_of_precondition_labels(const OperatorProxy &op, + const PropositionLayer ¤t) const { + LandmarkSet result; // TODO This looks like an O(n^2) algorithm where O(n log n) would do, a // bit like the Python string concatenation anti-pattern. @@ -202,18 +205,18 @@ lm_set LandmarkFactoryZhuGivan::union_of_precondition_labels(const OperatorProxy return result; } -lm_set LandmarkFactoryZhuGivan::union_of_condition_labels( +LandmarkSet LandmarkFactoryZhuGivan::union_of_condition_labels( const EffectConditionsProxy &effect_conditions, const PropositionLayer ¤t) const { - lm_set result; + LandmarkSet result; for (FactProxy effect_condition : effect_conditions) result = _union(result, current[effect_condition.get_variable().get_id()][effect_condition.get_value()].labels); return result; } -static bool _propagate_labels(lm_set &labels, const lm_set &new_labels, +static bool _propagate_labels(LandmarkSet &labels, const LandmarkSet &new_labels, const FactPair &prop) { - lm_set old_labels = labels; + LandmarkSet old_labels = labels; if (!labels.empty()) { labels = _intersection(labels, new_labels); @@ -232,13 +235,13 @@ static bool _propagate_labels(lm_set &labels, const lm_set &new_labels, return old_labels.size() != labels.size(); } -lm_set LandmarkFactoryZhuGivan::apply_operator_and_propagate_labels( +LandmarkSet LandmarkFactoryZhuGivan::apply_operator_and_propagate_labels( const OperatorProxy &op, const PropositionLayer ¤t, PropositionLayer &next) const { assert(operator_applicable(op, current)); - lm_set result; - lm_set precond_label_union = union_of_precondition_labels(op, current); + LandmarkSet result; + LandmarkSet precond_label_union = union_of_precondition_labels(op, current); for (EffectProxy effect : op.get_effects()) { FactPair effect_fact = effect.get_fact().get_pair(); @@ -247,7 +250,7 @@ lm_set LandmarkFactoryZhuGivan::apply_operator_and_propagate_labels( continue; if (operator_cond_effect_fires(effect.get_conditions(), current)) { 
- const lm_set precond_label_union_with_condeff = _union( + const LandmarkSet precond_label_union_with_condeff = _union( precond_label_union, union_of_condition_labels( // NOTE: this equals precond_label_union, if effects[i] is // not a conditional effect. @@ -282,7 +285,7 @@ void LandmarkFactoryZhuGivan::compute_triggers(const TaskProxy &task_proxy) { void LandmarkFactoryZhuGivan::add_operator_to_triggers(const OperatorProxy &op) { // Collect possible triggers first. - lm_set possible_triggers; + LandmarkSet possible_triggers; int op_or_axiom_id = get_operator_or_axiom_id(op); PreconditionsProxy preconditions = op.get_preconditions(); @@ -297,8 +300,8 @@ void LandmarkFactoryZhuGivan::add_operator_to_triggers(const OperatorProxy &op) operators_without_preconditions.push_back(op_or_axiom_id); // Add operator to triggers vector. - for (const FactPair &lm : possible_triggers) - triggers[lm.var][lm.value].push_back(op_or_axiom_id); + for (const FactPair &landmark : possible_triggers) + triggers[landmark.var][landmark.value].push_back(op_or_axiom_id); } bool LandmarkFactoryZhuGivan::supports_conditional_effects() const { diff --git a/src/search/landmarks/landmark_factory_zhu_givan.h b/src/search/landmarks/landmark_factory_zhu_givan.h index 97e1825378..e58b83b17b 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.h +++ b/src/search/landmarks/landmark_factory_zhu_givan.h @@ -10,12 +10,12 @@ #include namespace landmarks { -using lm_set = utils::HashSet; +using LandmarkSet = utils::HashSet; class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { class plan_graph_node { public: - lm_set labels; + LandmarkSet labels; inline bool reached() const { // NOTE: nodes are always labeled with itself, // if they have been reached @@ -45,18 +45,18 @@ class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { // propositions that: // (a) have just been reached OR (b) had their labels changed in next // proposition layer - lm_set 
apply_operator_and_propagate_labels(const OperatorProxy &op, - const PropositionLayer ¤t, PropositionLayer &next) const; + LandmarkSet apply_operator_and_propagate_labels(const OperatorProxy &op, + const PropositionLayer ¤t, PropositionLayer &next) const; // Calculate the union of precondition labels of op, using the // labels from current - lm_set union_of_precondition_labels(const OperatorProxy &op, - const PropositionLayer ¤t) const; + LandmarkSet union_of_precondition_labels(const OperatorProxy &op, + const PropositionLayer ¤t) const; // Calculate the union of precondition labels of a conditional effect, // using the labels from current - lm_set union_of_condition_labels(const EffectConditionsProxy &effect_conditions, - const PropositionLayer ¤t) const; + LandmarkSet union_of_condition_labels(const EffectConditionsProxy &effect_conditions, + const PropositionLayer ¤t) const; // Relaxed exploration, returns the last proposition layer // (the fixpoint) with labels diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index c7e4851078..971c1055df 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -23,7 +23,7 @@ LandmarkHeuristic::LandmarkHeuristic( } void LandmarkHeuristic::initialize( - const shared_ptr &lm_factory, bool prog_goal, + const shared_ptr &landmark_factory, bool prog_goal, bool prog_gn, bool prog_r) { /* Actually, we should test if this is the root task or a @@ -41,9 +41,9 @@ void LandmarkHeuristic::initialize( utils::exit_with(utils::ExitCode::SEARCH_UNSUPPORTED); } - compute_landmark_graph(lm_factory); - lm_status_manager = utils::make_unique_ptr( - *lm_graph, prog_goal, prog_gn, prog_r); + compute_landmark_graph(landmark_factory); + landmark_status_manager = utils::make_unique_ptr( + *landmark_graph, prog_goal, prog_gn, prog_r); initial_landmark_graph_has_cycle_of_natural_orderings = landmark_graph_has_cycle_of_natural_orderings(); @@ -63,10 
+63,10 @@ void LandmarkHeuristic::initialize( } bool LandmarkHeuristic::landmark_graph_has_cycle_of_natural_orderings() { - int num_landmarks = lm_graph->get_num_landmarks(); + int num_landmarks = landmark_graph->get_num_landmarks(); vector closed(num_landmarks, false); vector visited(num_landmarks, false); - for (const auto &node : *lm_graph) { + for (const auto &node : *landmark_graph) { if (depth_first_search_for_cycle_of_natural_orderings( *node, closed, visited)) { return true; @@ -98,30 +98,30 @@ bool LandmarkHeuristic::depth_first_search_for_cycle_of_natural_orderings( } void LandmarkHeuristic::compute_landmark_graph( - const shared_ptr &lm_factory) { - utils::Timer lm_graph_timer; + const shared_ptr &landmark_factory) { + utils::Timer landmark_graph_timer; if (log.is_at_least_normal()) { log << "Generating landmark graph..." << endl; } - lm_graph = lm_factory->compute_lm_graph(task); - assert(lm_factory->achievers_are_calculated()); + landmark_graph = landmark_factory->compute_landmark_graph(task); + assert(landmark_factory->achievers_are_calculated()); if (log.is_at_least_normal()) { - log << "Landmark graph generation time: " << lm_graph_timer << endl; - log << "Landmark graph contains " << lm_graph->get_num_landmarks() + log << "Landmark graph generation time: " << landmark_graph_timer << endl; + log << "Landmark graph contains " << landmark_graph->get_num_landmarks() << " landmarks, of which " - << lm_graph->get_num_disjunctive_landmarks() + << landmark_graph->get_num_disjunctive_landmarks() << " are disjunctive and " - << lm_graph->get_num_conjunctive_landmarks() + << landmark_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; - log << "Landmark graph contains " << lm_graph->get_num_orderings() + log << "Landmark graph contains " << landmark_graph->get_num_orderings() << " orderings." 
<< endl; } } void LandmarkHeuristic::compute_landmarks_achieved_by_atom() { - for (const auto &node : *lm_graph) { + for (const auto &node : *landmark_graph) { const int id = node->get_id(); const Landmark &landmark = node->get_landmark(); if (landmark.is_conjunctive) { @@ -203,7 +203,7 @@ int LandmarkHeuristic::compute_heuristic(const State &ancestor_state) { int h = get_heuristic_value(ancestor_state); if (use_preferred_operators) { ConstBitsetView future = - lm_status_manager->get_future_landmarks(ancestor_state); + landmark_status_manager->get_future_landmarks(ancestor_state); State state = convert_ancestor_state(ancestor_state); generate_preferred_operators(state, future); } @@ -211,12 +211,12 @@ int LandmarkHeuristic::compute_heuristic(const State &ancestor_state) { } void LandmarkHeuristic::notify_initial_state(const State &initial_state) { - lm_status_manager->progress_initial_state(initial_state); + landmark_status_manager->progress_initial_state(initial_state); } void LandmarkHeuristic::notify_state_transition( const State &parent_state, OperatorID op_id, const State &state) { - lm_status_manager->progress(parent_state, op_id, state); + landmark_status_manager->progress(parent_state, op_id, state); if (cache_evaluator_values) { /* TODO: It may be more efficient to check that the past landmark set has actually changed and only then mark the h value as dirty. */ diff --git a/src/search/landmarks/landmark_heuristic.h b/src/search/landmarks/landmark_heuristic.h index c85fb1f884..a677d20e43 100644 --- a/src/search/landmarks/landmark_heuristic.h +++ b/src/search/landmarks/landmark_heuristic.h @@ -28,19 +28,19 @@ class LandmarkHeuristic : public Heuristic { const LandmarkNode &node, std::vector &closed, std::vector &visited); protected: - std::shared_ptr lm_graph; + std::shared_ptr landmark_graph; const bool use_preferred_operators; // This map remains empty unless *use_preferred_operators* is true. 
utils::HashMap> landmarks_achieved_by_atom; - std::unique_ptr lm_status_manager; + std::unique_ptr landmark_status_manager; std::unique_ptr successor_generator; void initialize( - const std::shared_ptr &lm_factory, + const std::shared_ptr &landmark_factory, bool prog_goal, bool prog_gn, bool prog_r); void compute_landmark_graph( - const std::shared_ptr &lm_factory); + const std::shared_ptr &landmark_factory); virtual int get_heuristic_value(const State &ancestor_state) = 0; diff --git a/src/search/landmarks/landmark_status_manager.cc b/src/search/landmarks/landmark_status_manager.cc index 6118ff800a..6b4c6a782f 100644 --- a/src/search/landmarks/landmark_status_manager.cc +++ b/src/search/landmarks/landmark_status_manager.cc @@ -51,27 +51,27 @@ static vector>> get_reas } LandmarkStatusManager::LandmarkStatusManager( - LandmarkGraph &graph, - bool progress_goals, - bool progress_greedy_necessary_orderings, - bool progress_reasonable_orderings) - : lm_graph(graph), - goal_landmarks(progress_goals ? get_goal_landmarks(graph) + LandmarkGraph &landmark_graph, + const bool progress_goals, + const bool progress_greedy_necessary_orderings, + const bool progress_reasonable_orderings) + : landmark_graph(landmark_graph), + goal_landmarks(progress_goals ? get_goal_landmarks(landmark_graph) : vector{}), greedy_necessary_children( progress_greedy_necessary_orderings - ? get_greedy_necessary_children(graph) + ? get_greedy_necessary_children(landmark_graph) : vector>>{}), reasonable_parents( progress_reasonable_orderings - ? get_reasonable_parents(graph) + ? get_reasonable_parents(landmark_graph) : vector>>{}), - /* We initialize to true in *past_landmarks* because true is the + /* We initialize to true in `past_landmarks` because true is the neutral element of conjunction/set intersection. 
*/ - past_landmarks(vector(graph.get_num_landmarks(), true)), - /* We initialize to false in *future_landmarks* because false is + past_landmarks(vector(landmark_graph.get_num_landmarks(), true)), + /* We initialize to false in `future_landmarks` because false is the neutral element for disjunction/set union. */ - future_landmarks(vector(graph.get_num_landmarks(), false)) { + future_landmarks(vector(landmark_graph.get_num_landmarks(), false)) { } BitsetView LandmarkStatusManager::get_past_landmarks(const State &state) { @@ -94,10 +94,10 @@ void LandmarkStatusManager::progress_initial_state(const State &initial_state) { BitsetView past = get_past_landmarks(initial_state); BitsetView future = get_future_landmarks(initial_state); - for (const auto &node : lm_graph) { - int id = node->get_id(); - const Landmark &lm = node->get_landmark(); - if (lm.is_true_in_state(initial_state)) { + for (const auto &node : landmark_graph) { + const int id = node->get_id(); + const Landmark &landmark = node->get_landmark(); + if (landmark.is_true_in_state(initial_state)) { assert(past.test(id)); /* A landmark B that holds initially is always past. 
If there is a @@ -113,7 +113,8 @@ void LandmarkStatusManager::progress_initial_state(const State &initial_state) { */ if (any_of(node->parents.begin(), node->parents.end(), [initial_state](auto &parent) { - Landmark &landmark = parent.first->get_landmark(); + const Landmark &landmark = + parent.first->get_landmark(); return !landmark.is_true_in_state(initial_state); })) { future.set(id); @@ -139,10 +140,10 @@ void LandmarkStatusManager::progress( ConstBitsetView parent_future = get_future_landmarks(parent_ancestor_state); BitsetView future = get_future_landmarks(ancestor_state); - assert(past.size() == lm_graph.get_num_landmarks()); - assert(parent_past.size() == lm_graph.get_num_landmarks()); - assert(future.size() == lm_graph.get_num_landmarks()); - assert(parent_future.size() == lm_graph.get_num_landmarks()); + assert(past.size() == landmark_graph.get_num_landmarks()); + assert(parent_past.size() == landmark_graph.get_num_landmarks()); + assert(future.size() == landmark_graph.get_num_landmarks()); + assert(parent_future.size() == landmark_graph.get_num_landmarks()); progress_landmarks( parent_past, parent_future, parent_ancestor_state, @@ -156,11 +157,11 @@ void LandmarkStatusManager::progress_landmarks( ConstBitsetView &parent_past, ConstBitsetView &parent_future, const State &parent_ancestor_state, BitsetView &past, BitsetView &future, const State &ancestor_state) { - for (const auto &node : lm_graph) { + for (const auto &node : landmark_graph) { int id = node->get_id(); - const Landmark &lm = node->get_landmark(); + const Landmark &landmark = node->get_landmark(); if (parent_future.test(id)) { - if (!lm.is_true_in_state(ancestor_state)) { + if (!landmark.is_true_in_state(ancestor_state)) { /* A landmark that is future in the parent remains future if it does not hold in the current state. 
If it also @@ -170,7 +171,7 @@ void LandmarkStatusManager::progress_landmarks( if (!parent_past.test(id)) { past.reset(id); } - } else if (lm.is_true_in_state(parent_ancestor_state)) { + } else if (landmark.is_true_in_state(parent_ancestor_state)) { /* If the landmark held in the parent already, then it was not added by this transition and should remain @@ -195,11 +196,11 @@ void LandmarkStatusManager::progress_goals(const State &ancestor_state, void LandmarkStatusManager::progress_greedy_necessary_orderings( const State &ancestor_state, const BitsetView &past, BitsetView &future) { for (auto &[tail, children] : greedy_necessary_children) { - const Landmark &lm = tail->get_landmark(); + const Landmark &landmark = tail->get_landmark(); assert(!children.empty()); for (auto &child : children) { if (!past.test(child->get_id()) - && !lm.is_true_in_state(ancestor_state)) { + && !landmark.is_true_in_state(ancestor_state)) { future.set(tail->get_id()); break; } diff --git a/src/search/landmarks/landmark_status_manager.h b/src/search/landmarks/landmark_status_manager.h index d53815d072..e5a3b1b5ef 100644 --- a/src/search/landmarks/landmark_status_manager.h +++ b/src/search/landmarks/landmark_status_manager.h @@ -10,7 +10,7 @@ class LandmarkGraph; class LandmarkNode; class LandmarkStatusManager { - LandmarkGraph &lm_graph; + LandmarkGraph &landmark_graph; const std::vector goal_landmarks; const std::vector>> greedy_necessary_children; const std::vector>> reasonable_parents; @@ -30,7 +30,7 @@ class LandmarkStatusManager { const BitsetView &past, BitsetView &future); public: LandmarkStatusManager( - LandmarkGraph &graph, + LandmarkGraph &landmark_graph, bool progress_goals, bool progress_greedy_necessary_orderings, bool progress_reasonable_orderings); diff --git a/src/search/landmarks/landmark_sum_heuristic.cc b/src/search/landmarks/landmark_sum_heuristic.cc index 55b0dc076c..35f0a1eb03 100644 --- a/src/search/landmarks/landmark_sum_heuristic.cc +++ 
b/src/search/landmarks/landmark_sum_heuristic.cc @@ -31,7 +31,7 @@ static bool are_dead_ends_reliable( } LandmarkSumHeuristic::LandmarkSumHeuristic( - const shared_ptr &lm_factory, + const shared_ptr &landmark_factory, bool pref, bool prog_goal, bool prog_gn, bool prog_r, const shared_ptr &transform, bool cache_estimates, const string &description, utils::Verbosity verbosity, @@ -41,11 +41,11 @@ LandmarkSumHeuristic::LandmarkSumHeuristic( tasks::get_default_value_axioms_task_if_needed(transform, axioms), cache_estimates, description, verbosity), dead_ends_reliable( - are_dead_ends_reliable(lm_factory, task_proxy)) { + are_dead_ends_reliable(landmark_factory, task_proxy)) { if (log.is_at_least_normal()) { log << "Initializing landmark sum heuristic..." << endl; } - initialize(lm_factory, prog_goal, prog_gn, prog_r); + initialize(landmark_factory, prog_goal, prog_gn, prog_r); compute_landmark_costs(); } @@ -74,9 +74,9 @@ void LandmarkSumHeuristic::compute_landmark_costs() { over all operators and use this cost for all derived landmarks. 
*/ int min_operator_cost = task_properties::get_min_operator_cost(task_proxy); - min_first_achiever_costs.reserve(lm_graph->get_num_landmarks()); - min_possible_achiever_costs.reserve(lm_graph->get_num_landmarks()); - for (const auto &node : *lm_graph) { + min_first_achiever_costs.reserve(landmark_graph->get_num_landmarks()); + min_possible_achiever_costs.reserve(landmark_graph->get_num_landmarks()); + for (const auto &node : *landmark_graph) { if (node->get_landmark().is_derived) { min_first_achiever_costs.push_back(min_operator_cost); min_possible_achiever_costs.push_back(min_operator_cost); @@ -94,10 +94,10 @@ void LandmarkSumHeuristic::compute_landmark_costs() { int LandmarkSumHeuristic::get_heuristic_value(const State &ancestor_state) { int h = 0; ConstBitsetView past = - lm_status_manager->get_past_landmarks(ancestor_state); + landmark_status_manager->get_past_landmarks(ancestor_state); ConstBitsetView future = - lm_status_manager->get_future_landmarks(ancestor_state); - for (int id = 0; id < lm_graph->get_num_landmarks(); ++id) { + landmark_status_manager->get_future_landmarks(ancestor_state); + for (int id = 0; id < landmark_graph->get_num_landmarks(); ++id) { if (future.test(id)) { int min_achiever_cost = past.test(id) ? 
min_possible_achiever_costs[id] : min_first_achiever_costs[id]; diff --git a/src/search/landmarks/landmark_sum_heuristic.h b/src/search/landmarks/landmark_sum_heuristic.h index 9783f43bd3..1faf7f5389 100644 --- a/src/search/landmarks/landmark_sum_heuristic.h +++ b/src/search/landmarks/landmark_sum_heuristic.h @@ -17,7 +17,7 @@ class LandmarkSumHeuristic : public LandmarkHeuristic { int get_heuristic_value(const State &ancestor_state) override; public: LandmarkSumHeuristic( - const std::shared_ptr &lm_factory, bool pref, + const std::shared_ptr &landmark_factory, bool pref, bool prog_goal, bool prog_gn, bool prog_r, const std::shared_ptr &transform, bool cache_estimates, const std::string &description, diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 5388eecf12..5b5848ca8f 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -30,10 +30,10 @@ unordered_map _intersect(const unordered_map &a, const unord return result; } -bool possibly_reaches_lm(const OperatorProxy &op, - const vector> &reached, - const Landmark &landmark) { - /* Check whether operator o can possibly make landmark lmp true in a +bool possibly_reaches_landmark(const OperatorProxy &op, + const vector> &reached, + const Landmark &landmark) { + /* Check whether operator o can possibly make `landmark` true in a relaxed task (as given by the reachability information in reached) */ assert(!reached.empty()); @@ -46,7 +46,7 @@ bool possibly_reaches_lm(const OperatorProxy &op, return false; // Go through all effects of o and check whether one can reach a - // proposition in lmp + // proposition in `landmark`. 
for (EffectProxy effect: op.get_effects()) { FactProxy effect_fact = effect.get_fact(); assert(!reached[effect_fact.get_variable().get_id()].empty()); diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index 1c6c3271fd..d0d6b2fb85 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -20,7 +20,7 @@ extern std::unordered_map _intersect( const std::unordered_map &a, const std::unordered_map &b); -extern bool possibly_reaches_lm( +extern bool possibly_reaches_landmark( const OperatorProxy &op, const std::vector> &reached, const Landmark &landmark); From 3ec216f0826db4bf9320155e34866523d7f75b15 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 13 Feb 2025 16:19:59 +0100 Subject: [PATCH 008/108] Fix minor styling issues. --- src/search/landmarks/exploration.cc | 3 ++- src/search/landmarks/landmark_factory_h_m.cc | 2 +- src/search/landmarks/landmark_factory_reasonable_orders_hps.cc | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index fa4875bf28..f3a2c262fd 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -57,6 +57,7 @@ static int compute_number_of_unary_operators( void Exploration::build_unary_operators() { const OperatorsProxy operators = task_proxy.get_operators(); const AxiomsProxy axioms = task_proxy.get_axioms(); + /* We need to reserve memory for this vector because we cross-reference to the memory address of its elements while building it, meaning a resize @@ -77,7 +78,7 @@ static vector get_sorted_effect_conditions( const EffectProxy &effect) { vector effect_conditions; effect_conditions.reserve(effect.get_conditions().size()); - for (FactProxy effect_condition: effect.get_conditions()) { + for (FactProxy effect_condition : effect.get_conditions()) { effect_conditions.push_back(effect_condition.get_pair()); } sort(effect_conditions.begin(), effect_conditions.end()); diff 
--git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index dc6d08f194..b19fb063e6 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -989,7 +989,7 @@ void LandmarkFactoryHM::generate_landmarks( set_minus(h_m_table_[f1].landmarks, h_m_table_[f1].necessary); } - // and add the orderings. + // add the orderings. for (int set_index : all_landmarks) { for (int landmark : h_m_table_[set_index].landmarks) { diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 1ec00f1509..2d02c638ac 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -37,7 +37,7 @@ void LandmarkFactoryReasonableOrdersHPS::generate_landmarks( void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( const TaskProxy &task_proxy) { /* - Approximate reasonable orders according to Hoffmann et al. (JAIR 2004). + Approximate reasonable orderings according to Hoffmann et al. (JAIR 2004). If node_p is in goal, then any node2_p which interferes with node_p can be reasonably ordered before node_p. Otherwise, if From 213b85d301b7639d233d601bd262ac76792f9cd7 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 14 Feb 2025 13:38:03 +0100 Subject: [PATCH 009/108] Make single-use local function static. 
--- .../landmarks/landmark_factory_relaxation.cc | 1 + src/search/landmarks/landmark_heuristic.cc | 85 ++++++++++--------- src/search/landmarks/landmark_heuristic.h | 11 +-- src/search/landmarks/util.h | 3 +- 4 files changed, 50 insertions(+), 50 deletions(-) diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 847a0a1900..7919cc6db4 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -54,6 +54,7 @@ void LandmarkFactoryRelaxation::calc_achievers( achievers_calculated = true; } +// TODO: Move this to lm_exhaust and make it a static function. bool LandmarkFactoryRelaxation::relaxed_task_solvable( const TaskProxy &task_proxy, Exploration &exploration, const Landmark &landmark, const bool use_unary_relaxation) { diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 971c1055df..d792c17b60 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -18,18 +18,58 @@ LandmarkHeuristic::LandmarkHeuristic( const shared_ptr &transform, bool cache_estimates, const string &description, utils::Verbosity verbosity) : Heuristic(transform, cache_estimates, description, verbosity), + initial_landmark_graph_has_cycle_of_natural_orderings(false), use_preferred_operators(use_preferred_operators), successor_generator(nullptr) { } +/* TODO: We would prefer the following two functions to be implemented + somewhere else as more generic graph algorithms. 
*/ +static bool depth_first_search_for_cycle_of_natural_orderings( + const LandmarkNode &node, std::vector &closed, + std::vector &visited) { + int id = node.get_id(); + if (closed[id]) { + return false; + } else if (visited[id]) { + return true; + } + + visited[id] = true; + for (auto &child : node.children) { + if (child.second >= OrderingType::NATURAL) { + if (depth_first_search_for_cycle_of_natural_orderings( + *child.first, closed, visited)) { + return true; + } + } + } + closed[id] = true; + return false; +} + +static bool landmark_graph_has_cycle_of_natural_orderings( + const LandmarkGraph &landmark_graph) { + const int num_landmarks = landmark_graph.get_num_landmarks(); + vector closed(num_landmarks, false); + vector visited(num_landmarks, false); + for (const auto &node : landmark_graph) { + if (depth_first_search_for_cycle_of_natural_orderings( + *node, closed, visited)) { + return true; + } + } + return false; +} + void LandmarkHeuristic::initialize( const shared_ptr &landmark_factory, bool prog_goal, bool prog_gn, bool prog_r) { /* - Actually, we should test if this is the root task or a - task that *only* transforms costs and/or adds negated axioms. - However, there is currently no good way to do this, so we use - this incomplete, slightly less safe test. + Actually, we should test if this is the root task or a task that *only* + transforms costs and/or adds negated axioms. However, there is currently + no good way to do this, so we use this incomplete, slightly less safe + test. 
*/ if (task != tasks::g_root_task && dynamic_cast(task.get()) == nullptr @@ -46,7 +86,7 @@ void LandmarkHeuristic::initialize( *landmark_graph, prog_goal, prog_gn, prog_r); initial_landmark_graph_has_cycle_of_natural_orderings = - landmark_graph_has_cycle_of_natural_orderings(); + landmark_graph_has_cycle_of_natural_orderings(*landmark_graph); if (initial_landmark_graph_has_cycle_of_natural_orderings && log.is_at_least_normal()) { log << "Landmark graph contains a cycle of natural orderings." << endl; @@ -62,41 +102,6 @@ void LandmarkHeuristic::initialize( } } -bool LandmarkHeuristic::landmark_graph_has_cycle_of_natural_orderings() { - int num_landmarks = landmark_graph->get_num_landmarks(); - vector closed(num_landmarks, false); - vector visited(num_landmarks, false); - for (const auto &node : *landmark_graph) { - if (depth_first_search_for_cycle_of_natural_orderings( - *node, closed, visited)) { - return true; - } - } - return false; -} - -bool LandmarkHeuristic::depth_first_search_for_cycle_of_natural_orderings( - const LandmarkNode &node, vector &closed, vector &visited) { - int id = node.get_id(); - if (closed[id]) { - return false; - } else if (visited[id]) { - return true; - } - - visited[id] = true; - for (auto &child : node.children) { - if (child.second >= OrderingType::NATURAL) { - if (depth_first_search_for_cycle_of_natural_orderings( - *child.first, closed, visited)) { - return true; - } - } - } - closed[id] = true; - return false; -} - void LandmarkHeuristic::compute_landmark_graph( const shared_ptr &landmark_factory) { utils::Timer landmark_graph_timer; diff --git a/src/search/landmarks/landmark_heuristic.h b/src/search/landmarks/landmark_heuristic.h index a677d20e43..8c66474665 100644 --- a/src/search/landmarks/landmark_heuristic.h +++ b/src/search/landmarks/landmark_heuristic.h @@ -21,16 +21,10 @@ class LandmarkStatusManager; class LandmarkHeuristic : public Heuristic { bool initial_landmark_graph_has_cycle_of_natural_orderings; - /* TODO: We would 
prefer the following two functions to be implemented - somewhere else as more generic graph algorithms. */ - bool landmark_graph_has_cycle_of_natural_orderings(); - bool depth_first_search_for_cycle_of_natural_orderings( - const LandmarkNode &node, std::vector &closed, - std::vector &visited); protected: std::shared_ptr landmark_graph; const bool use_preferred_operators; - // This map remains empty unless *use_preferred_operators* is true. + // This map remains empty unless `use_preferred_operators` is true. utils::HashMap> landmarks_achieved_by_atom; std::unique_ptr landmark_status_manager; @@ -73,8 +67,7 @@ extern void add_landmark_heuristic_options_to_feature( extern std::tuple, bool, bool, bool, bool, std::shared_ptr, bool, std::string, utils::Verbosity> -get_landmark_heuristic_arguments_from_options( - const plugins::Options &opts); +get_landmark_heuristic_arguments_from_options(const plugins::Options &opts); } #endif diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index d0d6b2fb85..732836268c 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -24,7 +24,8 @@ extern bool possibly_reaches_landmark( const OperatorProxy &op, const std::vector> &reached, const Landmark &landmark); -extern OperatorProxy get_operator_or_axiom(const TaskProxy &task_proxy, int op_or_axiom_id); +extern OperatorProxy get_operator_or_axiom( + const TaskProxy &task_proxy, int op_or_axiom_id); extern int get_operator_or_axiom_id(const OperatorProxy &op); extern void dump_landmark_graph( From 05c99ae91245781f1cf75cda3542d807c0e5e5df Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 14 Feb 2025 16:18:21 +0100 Subject: [PATCH 010/108] Minor cleanup. 
--- .../landmark_cost_partitioning_algorithms.cc | 13 ++--- .../landmark_cost_partitioning_algorithms.h | 5 +- .../landmark_cost_partitioning_heuristic.cc | 17 +++--- .../landmarks/landmark_sum_heuristic.cc | 54 +++++++++---------- src/search/landmarks/landmark_sum_heuristic.h | 6 +++ 5 files changed, 45 insertions(+), 50 deletions(-) diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index d641bb03c6..3725090b83 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -22,20 +22,15 @@ CostPartitioningAlgorithm::CostPartitioningAlgorithm( : landmark_graph(graph), operator_costs(operator_costs) { } -const unordered_set &CostPartitioningAlgorithm::get_achievers( - const Landmark &landmark, bool past) const { +static const unordered_set &get_achievers( + const Landmark &landmark, const bool past) { // Return relevant achievers of the landmark according to its status. - if (past) { - return landmark.possible_achievers; - } else { - return landmark.first_achievers; - } + return past ? 
landmark.possible_achievers : landmark.first_achievers; } - UniformCostPartitioningAlgorithm::UniformCostPartitioningAlgorithm( const vector &operator_costs, const LandmarkGraph &graph, - bool use_action_landmarks) + const bool use_action_landmarks) : CostPartitioningAlgorithm(operator_costs, graph), use_action_landmarks(use_action_landmarks) { } diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.h b/src/search/landmarks/landmark_cost_partitioning_algorithms.h index c95b313ca4..882c68b095 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.h +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.h @@ -20,9 +20,6 @@ class CostPartitioningAlgorithm { protected: const LandmarkGraph &landmark_graph; const std::vector operator_costs; - - const std::unordered_set &get_achievers( - const Landmark &landmark, bool past) const; public: CostPartitioningAlgorithm(const std::vector &operator_costs, const LandmarkGraph &graph); @@ -35,6 +32,8 @@ class CostPartitioningAlgorithm { class UniformCostPartitioningAlgorithm : public CostPartitioningAlgorithm { bool use_action_landmarks; + + public: UniformCostPartitioningAlgorithm(const std::vector &operator_costs, const LandmarkGraph &graph, diff --git a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc index c1d4133cc9..19dc36e29c 100644 --- a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc +++ b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc @@ -48,7 +48,7 @@ void LandmarkCostPartitioningHeuristic::check_unsupported_features( } void LandmarkCostPartitioningHeuristic::set_cost_partitioning_algorithm( - CostPartitioningMethod cost_partitioning, lp::LPSolverType lpsolver, + const CostPartitioningMethod cost_partitioning, lp::LPSolverType lpsolver, bool use_action_landmarks) { if (cost_partitioning == CostPartitioningMethod::OPTIMAL) { cost_partitioning_algorithm = @@ -67,7 +67,7 @@ void 
LandmarkCostPartitioningHeuristic::set_cost_partitioning_algorithm( int LandmarkCostPartitioningHeuristic::get_heuristic_value( const State &ancestor_state) { - double epsilon = 0.01; + constexpr double epsilon = 0.01; double h_val = cost_partitioning_algorithm->get_cost_partitioned_heuristic_value( @@ -144,14 +144,15 @@ class LandmarkCostPartitioningHeuristicFeature "which point the above inequality might not hold anymore."); document_note( "Optimal Cost Partitioning", - "To use ``cost_partitioning=optimal``, you must build the planner with LP " - "support. See [build instructions https://github.com/aibasel/downward/blob/main/BUILD.md]."); + "To use `cost_partitioning=optimal`, you must build the " + "planner with LP support. See " + "[build instructions https://github.com/aibasel/downward/blob/main/BUILD.md]."); document_note( "Preferred operators", - "Preferred operators should not be used for optimal planning. " - "See Evaluator#Landmark_sum_heuristic for more information " - "on using preferred operators; the comments there also apply " - "to this heuristic."); + "Preferred operators should not be used for optimal planning. 
See " + "Evaluator#Landmark_sum_heuristic for more information on using " + "preferred operators; the comments there also apply to this " + "heuristic."); document_language_support("action costs", "supported"); document_language_support( diff --git a/src/search/landmarks/landmark_sum_heuristic.cc b/src/search/landmarks/landmark_sum_heuristic.cc index 35f0a1eb03..2d8dd4d5fc 100644 --- a/src/search/landmarks/landmark_sum_heuristic.cc +++ b/src/search/landmarks/landmark_sum_heuristic.cc @@ -52,7 +52,7 @@ LandmarkSumHeuristic::LandmarkSumHeuristic( int LandmarkSumHeuristic::get_min_cost_of_achievers( const unordered_set &achievers) const { int min_cost = numeric_limits::max(); - for (int id : achievers) { + for (const int id : achievers) { OperatorProxy op = get_operator_or_axiom(task_proxy, id); min_cost = min(min_cost, op.get_cost()); } @@ -60,13 +60,6 @@ int LandmarkSumHeuristic::get_min_cost_of_achievers( } void LandmarkSumHeuristic::compute_landmark_costs() { - /* - This function runs under the assumption that landmark node IDs go - from 0 to the number of landmarks - 1, therefore the entry in - *min_first_achiever_costs* and *min_possible_achiever_costs* - at index i corresponds to the entry for the landmark node with ID i. - */ - /* For derived landmarks, we overapproximate that all operators are achievers. Since we do not want to explicitly store all operators @@ -99,8 +92,9 @@ int LandmarkSumHeuristic::get_heuristic_value(const State &ancestor_state) { landmark_status_manager->get_future_landmarks(ancestor_state); for (int id = 0; id < landmark_graph->get_num_landmarks(); ++id) { if (future.test(id)) { - int min_achiever_cost = past.test(id) ? min_possible_achiever_costs[id] - : min_first_achiever_costs[id]; + const int min_achiever_cost = + past.test(id) ? 
min_possible_achiever_costs[id] + : min_first_achiever_costs[id]; if (min_achiever_cost < numeric_limits::max()) { h += min_achiever_cost; } else { @@ -156,38 +150,38 @@ class LandmarkSumHeuristicFeature document_note( "Note on performance for satisficing planning", - "The cost of a landmark is based on the cost of the " - "operators that achieve it. For satisficing search this " - "can be counterproductive since it is often better to " - "focus on distance from goal (i.e. length of the plan) " - "rather than cost. In experiments we achieved the best " - "performance using the option 'transform=adapt_costs(one)' " - "to enforce unit costs."); + "The cost of a landmark is based on the cost of the operators that " + "achieve it. For satisficing search this can be counterproductive " + "since it is often better to focus on distance from goal (i.e. " + "length of the plan) rather than cost. In experiments we achieved " + "the best performance using the option " + "'transform=adapt_costs(one)' to enforce unit costs."); document_note( "Preferred operators", "Computing preferred operators is *only enabled* when setting " "pref=true because it has a nontrivial runtime cost. Using the " "heuristic for preferred operators without setting pref=true " "has no effect.\n" - "Our implementation to compute preferred operators based on landmarks " - "differs from the description in the literature (see reference above)." - "The original implementation computes two kinds of preferred " - "operators:\n\n" + "Our implementation to compute preferred operators based on " + "landmarks differs from the description in the literature (see " + "reference above). 
The original implementation computes two kinds " + "of preferred operators:\n\n" "+ If there is an applicable operator that reaches a landmark, all " "such operators are preferred.\n" "+ If no such operators exist, perform an FF-style relaxed " "exploration towards the nearest landmarks (according to the " "landmark orderings) and use the preferred operators of this " "exploration.\n\n\n" - "Our implementation only considers preferred operators of the first " - "type and does not include the second type. The rationale for this " - "change is that it reduces code complexity and helps more cleanly " - "separate landmark-based and FF-based computations in LAMA-like " - "planner configurations. In our experiments, only considering " - "preferred operators of the first type reduces performance when using " - "the heuristic and its preferred operators in isolation but improves " - "performance when using this heuristic in conjunction with the " - "FF heuristic, as in LAMA-like planner configurations."); + "Our implementation only considers preferred operators of the " + "first type and does not include the second type. The rationale " + "for this change is that it reduces code complexity and helps more " + "cleanly separate landmark-based and FF-based computations in " + "LAMA-like planner configurations. 
In our experiments, only " + "considering preferred operators of the first type reduces " + "performance when using the heuristic and its preferred operators " + "in isolation but improves performance when using this heuristic " + "in conjunction with the FF heuristic, as in LAMA-like planner " + "configurations."); document_language_support("action costs", "supported"); document_language_support( diff --git a/src/search/landmarks/landmark_sum_heuristic.h b/src/search/landmarks/landmark_sum_heuristic.h index 1faf7f5389..625a746e7b 100644 --- a/src/search/landmarks/landmark_sum_heuristic.h +++ b/src/search/landmarks/landmark_sum_heuristic.h @@ -7,6 +7,12 @@ namespace landmarks { class LandmarkSumHeuristic : public LandmarkHeuristic { const bool dead_ends_reliable; + /* + We compute landmark achiever costs under the assumption that landmark + node IDs go from 0 to the number of landmarks - 1, therefore the entry at + index i in the following vectors corresponds to the entry for the landmark + with ID i. + */ std::vector min_first_achiever_costs; std::vector min_possible_achiever_costs; From e9dfb7e0f5e28f7af8921111a5aa53b88d96c8d2 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 14 Feb 2025 18:02:29 +0100 Subject: [PATCH 011/108] Break apart functions in cost partitioning algorithms. 
--- .../landmark_cost_partitioning_algorithms.cc | 212 +++++++++++------- .../landmark_cost_partitioning_algorithms.h | 28 ++- 2 files changed, 162 insertions(+), 78 deletions(-) diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index 3725090b83..b8bb1c374d 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -35,36 +35,28 @@ UniformCostPartitioningAlgorithm::UniformCostPartitioningAlgorithm( use_action_landmarks(use_action_landmarks) { } -double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &landmark_status_manager, - const State &ancestor_state) { - vector landmarks_achieved_by_operator(operator_costs.size(), 0); - vector action_landmarks(operator_costs.size(), false); - - ConstBitsetView past = - landmark_status_manager.get_past_landmarks(ancestor_state); - ConstBitsetView future = - landmark_status_manager.get_future_landmarks(ancestor_state); - - double h = 0; - - /* First pass: - compute which op achieves how many landmarks. Along the way, - mark action landmarks and add their cost to h. */ +/* Compute which operator achieves how many landmarks. Along the way, mark + action landmarks and sum up their costs. */ +double UniformCostPartitioningAlgorithm::first_pass( + vector &landmarks_achieved_by_operator, + vector &action_landmarks, + ConstBitsetView &past, ConstBitsetView &future) { + double cost_action_landmarks = 0; for (const auto &node : landmark_graph) { int id = node->get_id(); if (future.test(id)) { const unordered_set &achievers = get_achievers(node->get_landmark(), past.test(id)); - if (achievers.empty()) + if (achievers.empty()) { return numeric_limits::max(); + } if (use_action_landmarks && achievers.size() == 1) { // We have found an action landmark for this state. 
int op_id = *achievers.begin(); if (!action_landmarks[op_id]) { action_landmarks[op_id] = true; assert(utils::in_bounds(op_id, operator_costs)); - h += operator_costs[op_id]; + cost_action_landmarks += operator_costs[op_id]; } } else { for (int op_id : achievers) { @@ -74,16 +66,19 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( } } } + return cost_action_landmarks; +} - /* TODO: Replace with Landmarks (to do so, we need some way to access the - status of a Landmark without access to the ID, which is part of - LandmarkNode). */ - vector relevant_landmarks; - - /* Second pass: - remove landmarks from consideration that are covered by - an action landmark; decrease the counters accordingly - so that no unnecessary cost is assigned to these landmarks. */ +/* + Collect all landmarks that are not covered by action landmarks. For all + landmarks that are covered, reduce the number of landmarks achieved by their + achievers to strengthen the cost partitioning. +*/ +vector UniformCostPartitioningAlgorithm::second_pass( + vector &landmarks_achieved_by_operator, + const vector &action_landmarks, + ConstBitsetView &past, ConstBitsetView &future) { + vector uncovered_landmarks; for (const auto &node : landmark_graph) { int id = node->get_id(); if (future.test(id)) { @@ -99,21 +94,29 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( } if (covered_by_action_landmark) { for (int op_id : achievers) { - assert(utils::in_bounds(op_id, landmarks_achieved_by_operator)); + assert(utils::in_bounds( + op_id, landmarks_achieved_by_operator)); --landmarks_achieved_by_operator[op_id]; } } else { - relevant_landmarks.push_back(node.get()); + uncovered_landmarks.push_back(node.get()); } } } + return uncovered_landmarks; +} - /* Third pass: - count shared costs for the remaining landmarks. */ - for (const LandmarkNode *node : relevant_landmarks) { +// Compute the cost partitioning. 
+double UniformCostPartitioningAlgorithm::third_pass( + const vector &uncovered_landmarks, + const vector &landmarks_achieved_by_operator, + ConstBitsetView &past, ConstBitsetView &future) { + double cost = 0; + for (const LandmarkNode *node : uncovered_landmarks) { // TODO: Iterate over Landmarks instead of LandmarkNodes int id = node->get_id(); assert(future.test(id)); + utils::unused_variable(future); const unordered_set &achievers = get_achievers(node->get_landmark(), past.test(id)); double min_cost = numeric_limits::max(); @@ -126,10 +129,40 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( static_cast(operator_costs[op_id]) / num_achieved; min_cost = min(min_cost, partitioned_cost); } - h += min_cost; + cost += min_cost; + } + return cost; +} + +double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( + const LandmarkStatusManager &landmark_status_manager, + const State &ancestor_state) { + vector landmarks_achieved_by_operator(operator_costs.size(), 0); + vector action_landmarks(operator_costs.size(), false); + + ConstBitsetView past = + landmark_status_manager.get_past_landmarks(ancestor_state); + ConstBitsetView future = + landmark_status_manager.get_future_landmarks(ancestor_state); + + const double cost_of_action_landmarks = first_pass( + landmarks_achieved_by_operator, action_landmarks, past, future); + if (cost_of_action_landmarks == numeric_limits::max()) { + return cost_of_action_landmarks; } - return h; + /* + TODO: Replace with Landmarks (to do so, we need some way to access the + status of a Landmark without access to the ID, which is part of + LandmarkNode). 
+ */ + const vector uncovered_landmarks = second_pass( + landmarks_achieved_by_operator, action_landmarks, past, future); + + const double cost_partitioning_cost = third_pass( + uncovered_landmarks, landmarks_achieved_by_operator, past, future); + + return cost_of_action_landmarks + cost_partitioning_cost; } @@ -144,19 +177,23 @@ OptimalCostPartitioningAlgorithm::OptimalCostPartitioningAlgorithm( lp::LinearProgram OptimalCostPartitioningAlgorithm::build_initial_lp() { /* The LP has one variable (column) per landmark and one inequality (row) per operator. */ - int num_cols = landmark_graph.get_num_landmarks(); - int num_rows = operator_costs.size(); + const int num_cols = landmark_graph.get_num_landmarks(); + const int num_rows = operator_costs.size(); named_vector::NamedVector lp_variables; - /* We want to maximize 1 * cost(lm_1) + ... + 1 * cost(lm_n), - so the coefficients are all 1. - Variable bounds are state-dependent; we initialize the range to {0}. */ + /* + We want to maximize 1 * cost(lm_1) + ... + 1 * cost(lm_n), so the + coefficients are all 1. + Variable bounds are state-dependent; we initialize the range to {0}. + */ lp_variables.resize(num_cols, lp::LPVariable(0.0, 0.0, 1.0)); - /* Set up lower bounds and upper bounds for the inequalities. - These simply say that the operator's total cost must fall - between 0 and the real operator cost. */ + /* + Set up lower bounds and upper bounds for the inequalities. These simply + say that the operator's total cost must fall between 0 and the real + operator cost. 
+ */ lp_constraints.resize(num_rows, lp::LPConstraint(0.0, 0.0)); for (size_t op_id = 0; op_id < operator_costs.size(); ++op_id) { lp_constraints[op_id].set_lower_bound(0); @@ -169,24 +206,13 @@ lp::LinearProgram OptimalCostPartitioningAlgorithm::build_initial_lp() { {}, lp_solver.get_infinity()); } -double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( - const LandmarkStatusManager &landmark_status_manager, - const State &ancestor_state) { - /* TODO: We could also do the same thing with action landmarks we - do in the uniform cost partitioning case. */ - - - ConstBitsetView past = - landmark_status_manager.get_past_landmarks(ancestor_state); - ConstBitsetView future = - landmark_status_manager.get_future_landmarks(ancestor_state); - /* - Set up LP variable bounds for the landmarks. - The range of cost(lm_1) is {0} if the landmark is already - reached; otherwise it is [0, infinity]. - The lower bounds are set to 0 in the constructor and never change. - */ - int num_cols = landmark_graph.get_num_landmarks(); +/* + Set up LP variable bounds for the landmarks. The range of cost(lm_1) is {0} if + the landmark is already reached; otherwise it is [0, infinity]. The lower + bounds are set to 0 in the constructor and never change. +*/ +void OptimalCostPartitioningAlgorithm::set_lp_bounds( + ConstBitsetView &future, const int num_cols) { for (int id = 0; id < num_cols; ++id) { if (future.test(id)) { lp.get_variables()[id].upper_bound = lp_solver.get_infinity(); @@ -194,15 +220,19 @@ double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( lp.get_variables()[id].upper_bound = 0; } } +} - /* - Define the constraint matrix. The constraints are of the form - cost(lm_i1) + cost(lm_i2) + ... + cost(lm_in) <= cost(o) - where lm_i1 ... lm_in are the landmarks for which o is a - relevant achiever. 
Hence, we add a triple (op, lm, 1.0) - for each relevant achiever op of landmark lm, denoting that - in the op-th row and lm-th column, the matrix has a 1.0 entry. - */ +/* + Define the constraint matrix. The constraints are of the form + cost(lm_i1) + cost(lm_i2) + ... + cost(lm_in) <= cost(o) + where lm_i1 ... lm_in are the landmarks for which o is a relevant achiever. + Hence, we add a triple (op, lm, 1.0) for each relevant achiever op of + landmark lm, denoting that in the op-th row and lm-th column, the matrix has + a 1.0 entry. + Returns true if the current state is a dead-end. +*/ +bool OptimalCostPartitioningAlgorithm::define_constraint_matrix( + ConstBitsetView &past, ConstBitsetView &future, const int num_cols) { // Reuse previous constraint objects to save the effort of recreating them. for (lp::LPConstraint &constraint : lp_constraints) { constraint.clear(); @@ -212,33 +242,61 @@ double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( if (future.test(id)) { const unordered_set &achievers = get_achievers(landmark, past.test(id)); - if (achievers.empty()) - return numeric_limits::max(); + /* + TODO: We could deal with things more uniformly by just adding a + constraint with no variables because there are no achievers, + which would then be detected as an unsolvable constraint by the + LP solver. However, as of now this does not work because + `get_cost_partitioned_heuristic_value` only adds non-empty + constraints to the LP. We should implement this differently, + which requires a solution that does not reuse constraints from + the previous iteration as it does now. 
+ */ + if (achievers.empty()) { + return true; + } for (int op_id : achievers) { assert(utils::in_bounds(op_id, lp_constraints)); lp_constraints[op_id].insert(id, 1.0); } } } + return false; +} + + +double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( + const LandmarkStatusManager &landmark_status_manager, + const State &ancestor_state) { + /* TODO: We could also do the same thing with action landmarks we do in the + uniform cost partitioning case. */ + + ConstBitsetView past = + landmark_status_manager.get_past_landmarks(ancestor_state); + ConstBitsetView future = + landmark_status_manager.get_future_landmarks(ancestor_state); + + const int num_cols = landmark_graph.get_num_landmarks(); + set_lp_bounds(future, num_cols); + const bool dead_end = define_constraint_matrix(past, future, num_cols); + if (dead_end) { + return numeric_limits::max(); + } /* Copy non-empty constraints and use those in the LP. This significantly speeds up the heuristic calculation. See issue443. */ // TODO: do not copy the data here. lp.get_constraints().clear(); for (const lp::LPConstraint &constraint : lp_constraints) { - if (!constraint.empty()) + if (!constraint.empty()) { lp.get_constraints().push_back(constraint); + } } - // Load the problem into the LP solver. lp_solver.load_problem(lp); - - // Solve the linear program. 
lp_solver.solve(); assert(lp_solver.has_optimal_solution()); - double h = lp_solver.get_objective_value(); - - return h; + return lp_solver.get_objective_value(); } } diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.h b/src/search/landmarks/landmark_cost_partitioning_algorithms.h index 882c68b095..65d70dce7d 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.h +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.h @@ -8,6 +8,9 @@ #include #include +#include "../per_state_bitset.h" + +class ConstBitsetView; class OperatorsProxy; namespace landmarks { @@ -33,7 +36,27 @@ class CostPartitioningAlgorithm { class UniformCostPartitioningAlgorithm : public CostPartitioningAlgorithm { bool use_action_landmarks; - + /* + TODO: We are aware that the following three function names are not + meaningful descriptions of what they do. We introduced these functions + in issue992 in an attempt to make the code more readable (e.g., by + breaking apart long functions) without changing its behavior. Since we + would like to implement computing the cost partitioning differently, and + because these functions do not have just one simple purpose, we did not + bother trying to find descriptive function names. 
+ */ + double first_pass( + std::vector &landmarks_achieved_by_operator, + std::vector &action_landmarks, + ConstBitsetView &past, ConstBitsetView &future); + std::vector second_pass( + std::vector &landmarks_achieved_by_operator, + const std::vector &action_landmarks, ConstBitsetView &past, + ConstBitsetView &future); + double third_pass( + const std::vector &uncovered_landmarks, + const std::vector &landmarks_achieved_by_operator, + ConstBitsetView &past, ConstBitsetView &future); public: UniformCostPartitioningAlgorithm(const std::vector &operator_costs, const LandmarkGraph &graph, @@ -58,6 +81,9 @@ class OptimalCostPartitioningAlgorithm : public CostPartitioningAlgorithm { lp::LinearProgram lp; lp::LinearProgram build_initial_lp(); + void set_lp_bounds(ConstBitsetView &future, int num_cols); + bool define_constraint_matrix( + ConstBitsetView &past, ConstBitsetView &future, int num_cols); public: OptimalCostPartitioningAlgorithm(const std::vector &operator_costs, const LandmarkGraph &graph, From c7ae6cca6170eef597588feaa94cb2cccfb108ac Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 17 Feb 2025 15:35:07 +0100 Subject: [PATCH 012/108] Fix computation of sorted preconditions in exploration. 
--- src/search/landmarks/exploration.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index fa4875bf28..7a7892032c 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -91,9 +91,8 @@ static vector get_sorted_extended_preconditions( assert(is_sorted(preconditions.begin(), preconditions.end())); vector effect_conditions = get_sorted_effect_conditions(effect); - vector extended_preconditions; - extended_preconditions.reserve( - preconditions.size() + effect_conditions.size()); + vector extended_preconditions( + preconditions.size() + effect_conditions.size(), FactPair::no_fact); merge(preconditions.begin(), preconditions.end(), effect_conditions.begin(), effect_conditions.end(), extended_preconditions.begin()); assert(is_sorted( From ee7692d450b32c286fd4891f0dd18e476e8314d8 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 21 Feb 2025 09:29:52 +0100 Subject: [PATCH 013/108] Implement review comments. 
--- .../landmark_cost_partitioning_algorithms.cc | 6 +- .../landmark_cost_partitioning_heuristic.cc | 2 +- .../landmarks/landmark_factory_rpg_sasp.cc | 56 +++++++++---------- .../landmarks/landmark_factory_zhu_givan.cc | 4 +- 4 files changed, 34 insertions(+), 34 deletions(-) diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index b8bb1c374d..c3842586a9 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -41,7 +41,7 @@ double UniformCostPartitioningAlgorithm::first_pass( vector &landmarks_achieved_by_operator, vector &action_landmarks, ConstBitsetView &past, ConstBitsetView &future) { - double cost_action_landmarks = 0; + double action_landmarks_cost = 0; for (const auto &node : landmark_graph) { int id = node->get_id(); if (future.test(id)) { @@ -56,7 +56,7 @@ double UniformCostPartitioningAlgorithm::first_pass( if (!action_landmarks[op_id]) { action_landmarks[op_id] = true; assert(utils::in_bounds(op_id, operator_costs)); - cost_action_landmarks += operator_costs[op_id]; + action_landmarks_cost += operator_costs[op_id]; } } else { for (int op_id : achievers) { @@ -66,7 +66,7 @@ double UniformCostPartitioningAlgorithm::first_pass( } } } - return cost_action_landmarks; + return action_landmarks_cost; } /* diff --git a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc index 19dc36e29c..d88fc5b3fa 100644 --- a/src/search/landmarks/landmark_cost_partitioning_heuristic.cc +++ b/src/search/landmarks/landmark_cost_partitioning_heuristic.cc @@ -144,7 +144,7 @@ class LandmarkCostPartitioningHeuristicFeature "which point the above inequality might not hold anymore."); document_note( "Optimal Cost Partitioning", - "To use `cost_partitioning=optimal`, you must build the " + "To use ``cost_partitioning=optimal``, you must 
build the " "planner with LP support. See " "[build instructions https://github.com/aibasel/downward/blob/main/BUILD.md]."); document_note( diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 0e0e2db177..93253990c1 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -71,13 +71,15 @@ void LandmarkFactoryRpgSasp::add_dtg_successor(int var_id, int pre, int post) { dtg_successors[var_id][pre].insert(post); } +/* + Compute a subset of the actual preconditions of `op` for achieving `landmark`. + It takes into account operator preconditions, but only reports those effect + conditions that are true for ALL effects achieving the landmark. + TODO: Make this comment clearer. +*/ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_landmark( const TaskProxy &task_proxy, const Landmark &landmark, const OperatorProxy &op, unordered_map &result) const { - // Computes a subset of the actual preconditions of o for achieving lmp - takes into account - // operator preconditions, but only reports those effect conditions that are true for ALL - // effects achieving the LM. - vector has_precondition_on_var(task_proxy.get_variables().size(), false); for (FactProxy precondition : op.get_preconditions()) { result.emplace(precondition.get_variable().get_id(), precondition.get_value()); @@ -105,7 +107,7 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_landmark( } } - // Check for lmp in conditional effects + // Check if `landmark` could be achieved by conditional effects. 
unordered_set achievable_atom_indices; for (EffectProxy effect : effects) { FactProxy effect_fact = effect.get_fact(); @@ -207,14 +209,12 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( const TaskProxy &task_proxy, const utils::HashSet &atoms, LandmarkNode &node, OrderingType type) { bool simple_landmark_exists = false; - // TODO: assign with FactPair::no_fact State initial_state = task_proxy.get_initial_state(); for (const FactPair &atom : atoms) { if (initial_state[atom.var].get_value() == atom.value) { return; } if (landmark_graph->contains_simple_landmark(atom)) { - // Propositions in this disj. LM exist already as simple LMs. simple_landmark_exists = true; break; } @@ -225,16 +225,16 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( return; } else if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { - // LM already exists, just add order. new_landmark_node = &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); add_ordering(*new_landmark_node, node, type); return; } - // LM overlaps with existing disj. LM, do not add. + // Landmark overlaps with existing disjunctive landmark, do not add. return; } - // This LM and no part of it exist, add the LM to the landmarks graph. + /* None of the atoms in this landmark occur in an existing landmark, so + we add the landmark to the landmark graph. */ Landmark landmark(vector(atoms.begin(), atoms.end()), true, false); new_landmark_node = &landmark_graph->add_landmark(move(landmark)); @@ -487,10 +487,8 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( VariablesProxy variables = task_proxy.get_variables(); find_forward_orders(variables, reached, node); - /* - Use domain transition graphs to find further orders. Only possible - if landmark is a simple. - */ + /* Use domain transition graphs to find further orders. Only possible + if landmark is simple. 
*/ const Landmark &landmark = node->get_landmark(); if (landmark.is_disjunctive) return; @@ -567,30 +565,31 @@ void LandmarkFactoryRpgSasp::find_forward_orders( const VariablesProxy &variables, const vector> &reached, LandmarkNode *node) { /* - `node` is ordered before any var-val pair that cannot be reached before - `node` according to relaxed planning graph (as captured in reached). - These orders are saved in the node member variable `forward_orders`. + The landmark of `node` is ordered before any atom that cannot be reached + before the landmark of `node` according to relaxed planning graph (as + captured in `reached`). These orderings are saved in the `forward_orders` + and added to the landmark graph in `add_landmark_forward_orderings`. */ - for (VariableProxy var : variables) + for (VariableProxy var : variables) { for (int value = 0; value < var.get_domain_size(); ++value) { if (reached[var.get_id()][value]) continue; - const FactPair fact(var.get_id(), value); + const FactPair atom(var.get_id(), value); bool insert = true; - for (const FactPair &atom : node->get_landmark().atoms) { - if (fact != atom) { + for (const FactPair &landmark_atom : node->get_landmark().atoms) { + if (atom != landmark_atom) { /* Make sure there is no operator that reaches both `atom` and (var, value) at the same time. 
*/ bool intersection_empty = true; - const vector &reach_fact = - get_operators_including_eff(fact); - const vector &achievers = + const vector &atom_achievers = get_operators_including_eff(atom); - for (size_t j = 0; j < reach_fact.size() && intersection_empty; ++j) - for (size_t k = 0; k < achievers.size() + const vector &landmark_achievers = + get_operators_including_eff(landmark_atom); + for (size_t j = 0; j < atom_achievers.size() && intersection_empty; ++j) + for (size_t k = 0; k < landmark_achievers.size() && intersection_empty; ++k) - if (reach_fact[j] == achievers[k]) + if (atom_achievers[j] == landmark_achievers[k]) intersection_empty = false; if (!intersection_empty) { @@ -603,8 +602,9 @@ void LandmarkFactoryRpgSasp::find_forward_orders( } } if (insert) - forward_orders[node].insert(fact); + forward_orders[node].insert(atom); } + } } void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index b9abf1981f..8f3a001925 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -192,8 +192,8 @@ static LandmarkSet _intersection(const LandmarkSet &a, const LandmarkSet &b) { return result; } -LandmarkSet LandmarkFactoryZhuGivan::union_of_precondition_labels(const OperatorProxy &op, - const PropositionLayer ¤t) const { +LandmarkSet LandmarkFactoryZhuGivan::union_of_precondition_labels( + const OperatorProxy &op, const PropositionLayer ¤t) const { LandmarkSet result; // TODO This looks like an O(n^2) algorithm where O(n log n) would do, a From d373aeda8590a7196cf45ccc55c8da64a6132285 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Feb 2025 09:08:45 +0100 Subject: [PATCH 014/108] Minor revisions to fix runtime profile. 
--- src/search/landmarks/exploration.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 7a7892032c..721f8669b3 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -145,7 +145,7 @@ void Exploration::reset_reachability_information() { } void Exploration::set_state_atoms_reached(const State &state) { - for (FactProxy atom : state) { + for (const FactProxy &atom : state) { Proposition *init_prop = &propositions[atom.get_variable().get_id()][atom.get_value()]; if (!init_prop->excluded) { @@ -178,9 +178,9 @@ unordered_set Exploration::get_excluded_operators( unordered_set excluded_op_ids; for (OperatorProxy op : task_proxy.get_operators()) { for (EffectProxy effect : op.get_effects()) { + auto [var, value] = effect.get_fact().get_pair(); if (effect.get_conditions().empty() - && propositions[effect.get_fact().get_variable().get_id()] - [effect.get_fact().get_value()].excluded) { + && propositions[var][value].excluded) { excluded_op_ids.insert(op.get_id()); break; } From 9024bb0b0a5b6b298308bb0677545b75d8721450 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 3 Mar 2025 16:45:43 +0100 Subject: [PATCH 015/108] Fix recognition of identical landmarks and driveby cleanup. 
--- .../subtask_generators.cc | 5 ++-- .../cartesian_abstractions/utils_landmarks.cc | 8 +++--- src/search/landmarks/exploration.cc | 27 ++++++++++--------- src/search/landmarks/exploration.h | 13 +++++---- src/search/landmarks/landmark_factory.h | 4 --- src/search/landmarks/landmark_factory_h_m.cc | 3 +++ src/search/landmarks/landmark_factory_h_m.h | 2 ++ .../landmarks/landmark_factory_merged.cc | 2 +- .../landmark_factory_reasonable_orders_hps.cc | 3 +++ .../landmarks/landmark_factory_rpg_sasp.cc | 8 +++--- .../landmarks/landmark_factory_rpg_sasp.h | 5 ++-- src/search/landmarks/landmark_graph.cc | 9 ++++--- src/search/landmarks/landmark_graph.h | 5 ++-- src/search/landmarks/util.cc | 12 ++++----- 14 files changed, 57 insertions(+), 49 deletions(-) diff --git a/src/search/cartesian_abstractions/subtask_generators.cc b/src/search/cartesian_abstractions/subtask_generators.cc index 7bf2206202..d5fe592802 100644 --- a/src/search/cartesian_abstractions/subtask_generators.cc +++ b/src/search/cartesian_abstractions/subtask_generators.cc @@ -18,7 +18,6 @@ #include #include #include -#include #include using namespace std; @@ -158,7 +157,7 @@ SharedTasks LandmarkDecomposition::get_subtasks( SharedTasks subtasks; const shared_ptr landmark_graph = get_landmark_graph(task); - utils::HashMap fact_to_landmark_map = + utils::HashMap atom_to_landmark_map = get_atom_to_landmark_map(landmark_graph); Facts landmark_facts = get_atom_landmarks(*landmark_graph); filter_and_order_facts(task, fact_order, landmark_facts, *rng, log); @@ -167,7 +166,7 @@ SharedTasks LandmarkDecomposition::get_subtasks( make_shared(task, Facts {landmark}); if (combine_facts) { subtask = build_domain_abstracted_task( - subtask, fact_to_landmark_map[landmark]); + subtask, atom_to_landmark_map[landmark]); } subtasks.push_back(subtask); } diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index d962bb3a1e..ddbd76ca06 100644 --- 
a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -8,6 +8,7 @@ #include "../utils/memory.h" #include +#include using namespace std; using namespace landmarks; @@ -56,18 +57,19 @@ VarToValues get_prev_landmarks(const LandmarkNode *node) { vector open; unordered_set closed; open.reserve(node->parents.size()); - for (const auto &[parent, type] : node->parents) { + for (const LandmarkNode *parent : views::keys(node->parents)) { open.push_back(parent); } while (!open.empty()) { const LandmarkNode *ancestor = open.back(); open.pop_back(); - if (closed.find(ancestor) != closed.end()) + if (closed.contains(ancestor)) { continue; + } closed.insert(ancestor); FactPair ancestor_atom = get_atom(ancestor->get_landmark()); groups[ancestor_atom.var].push_back(ancestor_atom.value); - for (const auto &[parent, type] : ancestor->parents) { + for (const LandmarkNode *parent : views::keys(ancestor->parents)) { open.push_back(parent); } } diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 721f8669b3..324496e78a 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -13,6 +13,8 @@ using namespace std; namespace landmarks { /* + TODO: Verify this comment. + Implementation note: Compared to RelaxationHeuristic, we *cannot simplify* unary operators, because this may conflict with excluded operators. For an example, consider that unary operator o1 is thrown out during @@ -27,14 +29,13 @@ Exploration::Exploration(const TaskProxy &task_proxy, utils::LogProxy &log) if (log.is_at_least_normal()) { log << "Initializing Exploration..." 
<< endl; } - build_propositions(); build_unary_operators(); } void Exploration::build_propositions() { for (VariableProxy var : task_proxy.get_variables()) { - const int var_id = var.get_id(); + int var_id = var.get_id(); propositions.emplace_back(var.get_domain_size()); for (int value = 0; value < var.get_domain_size(); ++value) { propositions[var_id][value].fact = FactPair(var_id, value); @@ -55,8 +56,8 @@ static int compute_number_of_unary_operators( } void Exploration::build_unary_operators() { - const OperatorsProxy operators = task_proxy.get_operators(); - const AxiomsProxy axioms = task_proxy.get_axioms(); + const OperatorsProxy &operators = task_proxy.get_operators(); + const AxiomsProxy &axioms = task_proxy.get_axioms(); /* We need to reserve memory for this vector because we cross-reference to the memory address of its elements while building it, meaning a resize @@ -117,6 +118,7 @@ void Exploration::build_unary_operators(const OperatorProxy &op) { vector preconditions; int op_or_axiom_id = get_operator_or_axiom_id(op); + // TODO: Maybe the problem is with the new sorting? for (FactProxy pre : op.get_preconditions()) { preconditions.push_back(pre.get_pair()); } @@ -159,8 +161,8 @@ void Exploration::set_state_atoms_reached(const State &state) { excluded proposition *unconditionally* must be marked as excluded. Note that we in general cannot exclude all unary operators derived from - operators that achieve an excluded propositon *conditionally*: - Given an operator with uncoditional effect e1 and conditional effect e2 with + operators that achieve an excluded proposition *conditionally*: + Given an operator with unconditional effect e1 and conditional effect e2 with condition c yields unary operators uo1: {} -> e1 and uo2: c -> e2. Excluding both would not allow us to achieve e1 when excluding proposition e2. 
We instead only mark uo2 as excluded (see in `initialize_operator_data` when @@ -168,7 +170,7 @@ void Exploration::set_state_atoms_reached(const State &state) { overapproximation, e.g. if the effect e1 also has condition c. */ unordered_set Exploration::get_excluded_operators( - const bool use_unary_relaxation) const { + bool use_unary_relaxation) const { /* When using unary relaxation, we only exclude unary operators but none of the original operators which have an undesired side effect. */ if (use_unary_relaxation) { @@ -189,8 +191,8 @@ unordered_set Exploration::get_excluded_operators( return excluded_op_ids; } -void Exploration::initialize_operator_data(const bool use_unary_relaxation) { - const unordered_set excluded_op_ids = +void Exploration::initialize_operator_data(bool use_unary_relaxation) { + unordered_set excluded_op_ids = get_excluded_operators(use_unary_relaxation); for (UnaryOperator &op : unary_operators) { @@ -198,7 +200,7 @@ void Exploration::initialize_operator_data(const bool use_unary_relaxation) { /* Aside from UnaryOperators derived from operators with an id in - op_ids_to_mark we also exclude UnaryOperators that have an excluded + `excluded_op_ids` we also exclude UnaryOperators that have an excluded proposition as effect (see comment for `get_excluded_operators`). 
*/ if (op.effect->excluded @@ -250,8 +252,9 @@ void Exploration::relaxed_exploration() { const vector &triggered_operators = prop->precondition_of; for (UnaryOperator *unary_op : triggered_operators) { - if (unary_op->excluded) + if (unary_op->excluded) { continue; + } --unary_op->num_unsatisfied_preconditions; assert(unary_op->num_unsatisfied_preconditions >= 0); if (unary_op->num_unsatisfied_preconditions == 0) { @@ -283,7 +286,7 @@ vector> Exploration::bundle_reachability_information() const { } vector> Exploration::compute_relaxed_reachability( - const vector &excluded_props, const bool use_unary_relaxation) { + const vector &excluded_props, bool use_unary_relaxation) { setup_exploration_queue(task_proxy.get_initial_state(), excluded_props, use_unary_relaxation); relaxed_exploration(); diff --git a/src/search/landmarks/exploration.h b/src/search/landmarks/exploration.h index 1b9f51e9d3..b668f91be3 100644 --- a/src/search/landmarks/exploration.h +++ b/src/search/landmarks/exploration.h @@ -80,13 +80,12 @@ class Exploration { Exploration(const TaskProxy &task_proxy, utils::LogProxy &log); /* - Computes the reachability of each proposition when excluding - operators in *excluded_op_ids* and ensuring that propositions - in *excluded_props* are not achieved. - The returned vector of vector denotes for each proposition - (grouped by their fact variable) whether it is relaxed reachable. - The values are exact in the absence of conditional effects, otherwise - they are an admissible approximation (see implementation for details). + Computes the reachability of each proposition when ensuring that + propositions in `excluded_props` are not achieved. The returned vector of + vector denotes for each proposition (grouped by their atom variable) + whether it is relaxed reachable. The values are exact in the absence of + conditional effects, otherwise they are an admissible approximation (see + implementation for details). 
*/ std::vector> compute_relaxed_reachability( const std::vector &excluded_props, bool use_unary_relaxation); diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index 6165c76a31..963a62731e 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -5,12 +5,8 @@ #include "../utils/logging.h" -#include #include #include -#include -#include -#include #include class TaskProxy; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 4c5135a5c7..c822095e71 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -11,6 +11,9 @@ #include "../utils/logging.h" #include "../utils/system.h" +#include +#include + using namespace std; using utils::ExitCode; diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 9323075cc0..0528132395 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -3,6 +3,8 @@ #include "landmark_factory.h" +#include + namespace landmarks { using FluentSet = std::vector; diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 2dd2bfeb8b..7a2a51e92a 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -31,7 +31,7 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( else return nullptr; } else if (landmark.is_disjunctive) { - const utils::HashSet atoms( + set atoms( landmark.atoms.begin(), landmark.atoms.end()); if (lm_graph->contains_identical_disjunctive_landmark(atoms)) return &lm_graph->get_disjunctive_landmark_node(landmark.atoms[0]); diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 5fc9cbb671..8b83a0f0d7 100644 --- 
a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -9,6 +9,9 @@ #include "../utils/logging.h" #include "../utils/markup.h" +#include +#include + using namespace std; namespace landmarks { LandmarkFactoryReasonableOrdersHPS::LandmarkFactoryReasonableOrdersHPS( diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 80137053f5..203bb2fc4d 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -201,7 +201,7 @@ void LandmarkFactoryRpgSasp::found_simple_lm_and_order( } void LandmarkFactoryRpgSasp::found_disj_lm_and_order( - const TaskProxy &task_proxy, const utils::HashSet &atoms, + const TaskProxy &task_proxy, const set &atoms, LandmarkNode &node, OrderingType type) { bool simple_lm_exists = false; // TODO: assign with FactPair::no_fact @@ -340,7 +340,7 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( const TaskProxy &task_proxy, - vector> &disjunctive_pre, + vector> &disjunctive_pre, vector> &reached, const Landmark &landmark) { /* Compute disjunctive preconditions from all operators than can potentially @@ -388,7 +388,7 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( } for (const auto &pre : preconditions) { if (static_cast(used_operators[pre.first].size()) == num_ops) { - utils::HashSet pre_set; + set pre_set; pre_set.insert(pre.second.begin(), pre.second.end()); if (pre_set.size() > 1) { // otherwise this LM is not actually a disjunctive LM disjunctive_pre.push_back(pre_set); @@ -450,7 +450,7 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( approximate_lookahead_orders(task_proxy, reached, lm_node); // Process achieving operators again to find disjunctive LMs - vector> disjunctive_pre; + vector> disjunctive_pre; compute_disjunctive_preconditions( 
task_proxy, disjunctive_pre, reached, landmark); for (const auto &preconditions : disjunctive_pre) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index d968feb289..16e5420e85 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -5,6 +5,7 @@ #include "../utils/hash.h" +#include #include #include #include @@ -39,7 +40,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { std::vector> &reached, const Landmark &landmark); void compute_disjunctive_preconditions( const TaskProxy &task_proxy, - std::vector> &disjunctive_pre, + std::vector> &disjunctive_pre, std::vector> &reached, const Landmark &landmark); @@ -49,7 +50,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { void found_simple_lm_and_order(const FactPair &atom, LandmarkNode &node, OrderingType type); void found_disj_lm_and_order(const TaskProxy &task_proxy, - const utils::HashSet &atoms, + const std::set &atoms, LandmarkNode &node, OrderingType type); void approximate_lookahead_orders(const TaskProxy &task_proxy, diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index f1e61c42d5..521382517f 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -53,14 +53,15 @@ bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &atom) const { } bool LandmarkGraph::contains_overlapping_disjunctive_landmark( - const utils::HashSet &atoms) const { + const set &atoms) const { return any_of(atoms.begin(), atoms.end(), [&](const FactPair &atom) { return contains_disjunctive_landmark(atom); }); } bool LandmarkGraph::contains_identical_disjunctive_landmark( - const utils::HashSet &atoms) const { + const set &atoms) const { + // TODO: What's going on here??? 
const LandmarkNode *node = nullptr; for (const FactPair &atom : atoms) { auto it = disjunctive_landmarks_to_nodes.find(atom); @@ -117,11 +118,11 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { } void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { - for (const auto &[parent, type] : node->parents) { + for (LandmarkNode *parent : views::keys(node->parents)) { parent->children.erase(node); assert(!parent->children.contains(node)); } - for (const auto &[child, type] : node->children) { + for (LandmarkNode *child : views::keys(node->children)) { child->parents.erase(node); assert(!child->parents.contains(node)); } diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index a179c5fdee..72ac95ce5f 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -9,6 +9,7 @@ #include "../utils/memory.h" #include +#include #include #include @@ -136,11 +137,11 @@ class LandmarkGraph { when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ bool contains_overlapping_disjunctive_landmark( - const utils::HashSet &atoms) const; + const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ bool contains_identical_disjunctive_landmark( - const utils::HashSet &atoms) const; + const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. 
*/ diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 5388eecf12..998faea90e 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -1,13 +1,13 @@ #include "util.h" +#include + #include "landmark.h" #include "landmark_graph.h" #include "../task_proxy.h" #include "../utils/logging.h" -#include - using namespace std; namespace landmarks { @@ -91,7 +91,7 @@ static void dump_node( cout << " lm" << node.get_id() << " [label=\""; bool first = true; const Landmark &landmark = node.get_landmark(); - for (FactPair atom : landmark.atoms) { + for (const FactPair &atom : landmark.atoms) { if (!first) { if (landmark.is_disjunctive) { cout << " | "; @@ -146,10 +146,8 @@ void dump_landmark_graph( cout << "digraph G {\n"; for (const auto &node : graph) { dump_node(task_proxy, *node, log); - for (const auto &child : node->children) { - const LandmarkNode *child_node = child.first; - const OrderingType &type = child.second; - dump_ordering(node->get_id(), child_node->get_id(), type, log); + for (const auto &[child, type] : node->children) { + dump_ordering(node->get_id(), child->get_id(), type, log); } } cout << "}" << endl; From 7a438d66962f29fb6af40daf44de1db0cc941f3a Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 4 Mar 2025 11:52:37 +0100 Subject: [PATCH 016/108] Use ranges::any_of and ranges::all_of to recover from slowdown. 
--- src/search/landmarks/landmark.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 8b6d0b0b04..02b2171e07 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -8,10 +8,12 @@ bool Landmark::is_true_in_state(const State &state) const { return state[atom.var].get_value() == atom.value; }; if (is_disjunctive) { - return any_of(atoms.cbegin(), atoms.cend(), is_atom_true_in_state); + return ranges::any_of( + atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } else { // Is conjunctive or simple. - return all_of(atoms.cbegin(), atoms.cend(), is_atom_true_in_state); + return ranges::all_of( + atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } } } From e1bd0a74f9757cf42f77cdd17cc2692410268720 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 4 Mar 2025 14:55:58 +0100 Subject: [PATCH 017/108] test --- src/search/task_proxy.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 4a07f81518..c652947161 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -95,7 +95,7 @@ using PackedStateBin = int_packer::IntPacker::Bin; /* Basic iterator support for proxy collections. */ -template +template class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. 
*/ @@ -103,7 +103,7 @@ class ProxyIterator { std::size_t pos; public: using iterator_category = std::input_iterator_tag; - using value_type = typename ProxyCollection::ItemType; + using value_type = ValueType; using difference_type = int; using pointer = const value_type *; using reference = value_type; From 1a5350d12181b6f5a2dab1eeceb82b7e30761c52 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 4 Mar 2025 15:41:28 +0100 Subject: [PATCH 018/108] test2 --- src/search/task_proxy.h | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index c652947161..6b710f410a 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -91,11 +91,16 @@ using PackedStateBin = int_packer::IntPacker::Bin; task_properties.h module. */ +template +concept has_item_type = requires { + typename T::ItemType; +}; /* Basic iterator support for proxy collections. */ -template +template +requires has_item_type class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. */ @@ -103,7 +108,7 @@ class ProxyIterator { std::size_t pos; public: using iterator_category = std::input_iterator_tag; - using value_type = ValueType; + using value_type = typename ProxyCollection::ItemType; using difference_type = int; using pointer = const value_type *; using reference = value_type; From 030fa0548c7dcb10e8880c1b2a471cccb616d186 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 5 Mar 2025 12:55:27 +0100 Subject: [PATCH 019/108] Fix style and improve use of concept. 
--- src/search/landmarks/exploration.cc | 10 +++++----- src/search/task_proxy.h | 5 ++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 324496e78a..456b1b9d22 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -124,12 +124,12 @@ void Exploration::build_unary_operators(const OperatorProxy &op) { } sort(preconditions.begin(), preconditions.end()); for (EffectProxy effect : op.get_effects()) { - vector precondition_propositions = - get_sorted_precondition_propositions(preconditions, effect); + vector precondition_propositions = + get_sorted_precondition_propositions(preconditions, effect); auto [var, value] = effect.get_fact().get_pair(); - Proposition *effect_proposition = &propositions[var][value]; - unary_operators.emplace_back( - precondition_propositions, effect_proposition, op_or_axiom_id); + Proposition *effect_proposition = &propositions[var][value]; + unary_operators.emplace_back( + precondition_propositions, effect_proposition, op_or_axiom_id); // Cross-reference unary operators. for (Proposition *pre : precondition_propositions) { diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 6b710f410a..0cdd170bdf 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -92,15 +92,14 @@ using PackedStateBin = int_packer::IntPacker::Bin; */ template -concept has_item_type = requires { +concept item_typed = requires { typename T::ItemType; }; /* Basic iterator support for proxy collections. */ -template -requires has_item_type +template class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. */ From 5c38893631d0a0f1dec7c1e103316b37523a453f Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 08:59:10 +0100 Subject: [PATCH 020/108] Add TODO note to concept stuff. 
--- src/search/task_proxy.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 0cdd170bdf..139fb8ee69 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -91,6 +91,8 @@ using PackedStateBin = int_packer::IntPacker::Bin; task_properties.h module. */ +/* TODO: Implement this in a separate issue and merge into this before + merging issue992 into main. */ template concept item_typed = requires { typename T::ItemType; From 34132de7e274a20bbba1980c16c8b4d9d28561f2 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 15:29:01 +0100 Subject: [PATCH 021/108] Clean up landmark utils. --- src/search/landmarks/landmark.cc | 4 + src/search/landmarks/landmark.h | 1 + .../landmark_cost_partitioning_algorithms.cc | 2 +- .../landmarks/landmark_factory_rpg_sasp.cc | 13 ++++ .../landmarks/landmark_status_manager.cc | 6 +- src/search/landmarks/util.cc | 75 ++++++------------- src/search/landmarks/util.h | 5 -- 7 files changed, 46 insertions(+), 60 deletions(-) diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 02b2171e07..77766659c1 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -16,4 +16,8 @@ bool Landmark::is_true_in_state(const State &state) const { atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } } + +bool Landmark::contains(const FactPair &atom) const { + return find(atoms.begin(), atoms.end(), atom) != atoms.end(); +} } diff --git a/src/search/landmarks/landmark.h b/src/search/landmarks/landmark.h index 17f83d8ccf..ee1b324088 100644 --- a/src/search/landmarks/landmark.h +++ b/src/search/landmarks/landmark.h @@ -37,6 +37,7 @@ class Landmark { std::unordered_set possible_achievers; bool is_true_in_state(const State &state) const; + bool contains(const FactPair &atom) const; }; } #endif diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc 
b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index c3842586a9..b41ac2fa15 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -95,7 +95,7 @@ vector UniformCostPartitioningAlgorithm::second_pass( if (covered_by_action_landmark) { for (int op_id : achievers) { assert(utils::in_bounds( - op_id, landmarks_achieved_by_operator)); + op_id, landmarks_achieved_by_operator)); --landmarks_achieved_by_operator[op_id]; } } else { diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index ddf6805ace..f703677373 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -17,6 +17,19 @@ using namespace std; using utils::ExitCode; namespace landmarks { +static unordered_map _intersect( + const unordered_map &a, const unordered_map &b) { + if (a.size() > b.size()) + return _intersect(b, a); + unordered_map result; + for (const auto &pair_a : a) { + const auto it_b = b.find(pair_a.first); + if (it_b != b.end() && it_b->second == pair_a.second) + result.insert(pair_a); + } + return result; +} + LandmarkFactoryRpgSasp::LandmarkFactoryRpgSasp( bool disjunctive_landmarks, bool use_orders, utils::Verbosity verbosity) : LandmarkFactoryRelaxation(verbosity), diff --git a/src/search/landmarks/landmark_status_manager.cc b/src/search/landmarks/landmark_status_manager.cc index 6b4c6a782f..56b15fe84a 100644 --- a/src/search/landmarks/landmark_status_manager.cc +++ b/src/search/landmarks/landmark_status_manager.cc @@ -52,9 +52,9 @@ static vector>> get_reas LandmarkStatusManager::LandmarkStatusManager( LandmarkGraph &landmark_graph, - const bool progress_goals, - const bool progress_greedy_necessary_orderings, - const bool progress_reasonable_orderings) + bool progress_goals, + bool progress_greedy_necessary_orderings, + bool progress_reasonable_orderings) : 
landmark_graph(landmark_graph), goal_landmarks(progress_goals ? get_goal_landmarks(landmark_graph) : vector{}), diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index ae4af2fb1f..7040921f13 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -1,7 +1,5 @@ #include "util.h" -#include - #include "landmark.h" #include "landmark_graph.h" @@ -11,58 +9,36 @@ using namespace std; namespace landmarks { -static bool _possibly_fires(const EffectConditionsProxy &conditions, const vector> &reached) { - for (FactProxy cond : conditions) - if (!reached[cond.get_variable().get_id()][cond.get_value()]) - return false; - return true; -} - -unordered_map _intersect(const unordered_map &a, const unordered_map &b) { - if (a.size() > b.size()) - return _intersect(b, a); - unordered_map result; - for (const auto &pair_a : a) { - const auto it_b = b.find(pair_a.first); - if (it_b != b.end() && it_b->second == pair_a.second) - result.insert(pair_a); - } - return result; +static bool condition_is_reachable( + const ConditionsProxy &conditions, const vector> &reached) { + return all_of(begin(conditions), end(conditions), + [reached](const FactProxy &condition) { + auto [var, value] = condition.get_pair(); + return reached[var][value]; + }); } +/* Check whether operator `op` can possibly make `landmark` true in a + relaxed task (as given by the reachability information in reached). 
*/ bool possibly_reaches_landmark(const OperatorProxy &op, const vector> &reached, const Landmark &landmark) { - /* Check whether operator o can possibly make `landmark` true in a - relaxed task (as given by the reachability information in reached) */ - assert(!reached.empty()); - - // Test whether all preconditions of o can be reached - // Otherwise, operator is not applicable - PreconditionsProxy preconditions = op.get_preconditions(); - for (FactProxy pre : preconditions) - if (!reached[pre.get_variable().get_id()][pre.get_value()]) - return false; - - // Go through all effects of o and check whether one can reach a - // proposition in `landmark`. - for (EffectProxy effect: op.get_effects()) { - FactProxy effect_fact = effect.get_fact(); - assert(!reached[effect_fact.get_variable().get_id()].empty()); - for (const FactPair &atom : landmark.atoms) { - if (effect_fact.get_pair() == atom) { - if (_possibly_fires(effect.get_conditions(), reached)) - return true; - break; - } - } + if (!condition_is_reachable(op.get_preconditions(), reached)) { + // Operator `op` is not applicable. + return false; } - return false; + // Check whether an effect of `op` reaches an atom in `landmark`. + EffectsProxy effects = op.get_effects(); + return any_of(begin(effects), end(effects), [&](const EffectProxy &effect) { + return landmark.contains(effect.get_fact().get_pair()) && + condition_is_reachable(effect.get_conditions(), reached); + }); } -OperatorProxy get_operator_or_axiom(const TaskProxy &task_proxy, int op_or_axiom_id) { +OperatorProxy get_operator_or_axiom(const TaskProxy &task_proxy, + int op_or_axiom_id) { if (op_or_axiom_id < 0) { return task_proxy.get_axioms()[-op_or_axiom_id - 1]; } else { @@ -79,7 +55,7 @@ int get_operator_or_axiom_id(const OperatorProxy &op) { } /* - The below functions use cout on purpose for dumping a landmark graph. + The functions below use cout on purpose for dumping a landmark graph. 
TODO: ideally, this should be written to a file or through a logger at least, but without the time and memory stamps. */ @@ -88,16 +64,13 @@ static void dump_node( const LandmarkNode &node, utils::LogProxy &log) { if (log.is_at_least_debug()) { + const Landmark &landmark = node.get_landmark(); + char delimiter = landmark.is_disjunctive ? '|' : '&'; cout << " lm" << node.get_id() << " [label=\""; bool first = true; - const Landmark &landmark = node.get_landmark(); for (const FactPair &atom : landmark.atoms) { if (!first) { - if (landmark.is_disjunctive) { - cout << " | "; - } else if (landmark.is_conjunctive) { - cout << " & "; - } + cout << " " << delimiter << " "; } first = false; VariableProxy var = task_proxy.get_variables()[atom.var]; diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index 732836268c..b8b70068b5 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -1,7 +1,6 @@ #ifndef LANDMARKS_UTIL_H #define LANDMARKS_UTIL_H -#include #include class OperatorProxy; @@ -16,10 +15,6 @@ class Landmark; class LandmarkNode; class LandmarkGraph; -extern std::unordered_map _intersect( - const std::unordered_map &a, - const std::unordered_map &b); - extern bool possibly_reaches_landmark( const OperatorProxy &op, const std::vector> &reached, const Landmark &landmark); From ac56a439ea7da5ca23e2a135d8ec25e2aa9b7287 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 15:59:36 +0100 Subject: [PATCH 022/108] Change member order to private, protected, public. 
--- src/search/landmarks/landmark_factory.cc | 163 +++++++++++------------ src/search/landmarks/landmark_factory.h | 36 +++-- 2 files changed, 97 insertions(+), 102 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index f8848ae728..326ac168c2 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -21,6 +21,86 @@ LandmarkFactory::LandmarkFactory(utils::Verbosity verbosity) : log(utils::get_log_for_verbosity(verbosity)), landmark_graph(nullptr) { } +void LandmarkFactory::generate_operators_lookups(const TaskProxy &task_proxy) { + /* Build datastructures for efficient landmark computation. Map propositions + to the operators that achieve them or have them as preconditions */ + + VariablesProxy variables = task_proxy.get_variables(); + operators_eff_lookup.resize(variables.size()); + for (VariableProxy var : variables) { + operators_eff_lookup[var.get_id()].resize(var.get_domain_size()); + } + OperatorsProxy operators = task_proxy.get_operators(); + for (OperatorProxy op : operators) { + const EffectsProxy effects = op.get_effects(); + for (EffectProxy effect : effects) { + const FactProxy effect_fact = effect.get_fact(); + operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( + get_operator_or_axiom_id(op)); + } + } + for (OperatorProxy axiom : task_proxy.get_axioms()) { + const EffectsProxy effects = axiom.get_effects(); + for (EffectProxy effect : effects) { + const FactProxy effect_fact = effect.get_fact(); + operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( + get_operator_or_axiom_id(axiom)); + } + } +} + +void LandmarkFactory::add_ordering(LandmarkNode &from, LandmarkNode &to, + OrderingType type) { + /* Adds an ordering in the landmarks graph. If an ordering between the same + landmarks is already present, the stronger ordering type wins. 
*/ + assert(&from != &to); + + // If ordering already exists, remove if weaker. + if (from.children.find(&to) != from.children.end() && from.children.find( + &to)->second < type) { + from.children.erase(&to); + assert(to.parents.find(&from) != to.parents.end()); + to.parents.erase(&from); + + assert(to.parents.find(&from) == to.parents.end()); + assert(from.children.find(&to) == from.children.end()); + } + // If ordering does not exist (or has just been removed), insert. + if (from.children.find(&to) == from.children.end()) { + assert(to.parents.find(&from) == to.parents.end()); + from.children.emplace(&to, type); + to.parents.emplace(&from, type); + if (log.is_at_least_debug()) { + log << "added parent with address " << &from << endl; + } + } + assert(from.children.find(&to) != from.children.end()); + assert(to.parents.find(&from) != to.parents.end()); +} + +void LandmarkFactory::discard_all_orderings() { + if (log.is_at_least_normal()) { + log << "Removing all orderings." << endl; + } + for (const auto &node : *landmark_graph) { + node->children.clear(); + node->parents.clear(); + } +} + +bool LandmarkFactory::is_landmark_precondition( + const OperatorProxy &op, const Landmark &landmark) const { + /* Test whether the landmark is used by the operator as a precondition. + A disjunctive landmarks is used if one of its disjuncts is used. */ + for (FactProxy pre : op.get_preconditions()) { + for (const FactPair &atom : landmark.atoms) { + if (pre.get_pair() == atom) + return true; + } + } + return false; +} + /* TODO: Update this comment @@ -83,92 +163,11 @@ shared_ptr LandmarkFactory::compute_landmark_graph( return landmark_graph; } -bool LandmarkFactory::is_landmark_precondition( - const OperatorProxy &op, const Landmark &landmark) const { - /* Test whether the landmark is used by the operator as a precondition. - A disjunctive landmarks is used if one of its disjuncts is used. 
*/ - for (FactProxy pre : op.get_preconditions()) { - for (const FactPair &atom : landmark.atoms) { - if (pre.get_pair() == atom) - return true; - } - } - return false; -} - -void LandmarkFactory::add_ordering(LandmarkNode &from, LandmarkNode &to, - OrderingType type) { - /* Adds an ordering in the landmarks graph. If an ordering between the same - landmarks is already present, the stronger ordering type wins. */ - assert(&from != &to); - - // If ordering already exists, remove if weaker. - if (from.children.find(&to) != from.children.end() && from.children.find( - &to)->second < type) { - from.children.erase(&to); - assert(to.parents.find(&from) != to.parents.end()); - to.parents.erase(&from); - - assert(to.parents.find(&from) == to.parents.end()); - assert(from.children.find(&to) == from.children.end()); - } - // If ordering does not exist (or has just been removed), insert. - if (from.children.find(&to) == from.children.end()) { - assert(to.parents.find(&from) == to.parents.end()); - from.children.emplace(&to, type); - to.parents.emplace(&from, type); - if (log.is_at_least_debug()) { - log << "added parent with address " << &from << endl; - } - } - assert(from.children.find(&to) != from.children.end()); - assert(to.parents.find(&from) != to.parents.end()); -} - -void LandmarkFactory::discard_all_orderings() { - if (log.is_at_least_normal()) { - log << "Removing all orderings." << endl; - } - for (const auto &node : *landmark_graph) { - node->children.clear(); - node->parents.clear(); - } -} - -void LandmarkFactory::generate_operators_lookups(const TaskProxy &task_proxy) { - /* Build datastructures for efficient landmark computation. 
Map propositions - to the operators that achieve them or have them as preconditions */ - - VariablesProxy variables = task_proxy.get_variables(); - operators_eff_lookup.resize(variables.size()); - for (VariableProxy var : variables) { - operators_eff_lookup[var.get_id()].resize(var.get_domain_size()); - } - OperatorsProxy operators = task_proxy.get_operators(); - for (OperatorProxy op : operators) { - const EffectsProxy effects = op.get_effects(); - for (EffectProxy effect : effects) { - const FactProxy effect_fact = effect.get_fact(); - operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( - get_operator_or_axiom_id(op)); - } - } - for (OperatorProxy axiom : task_proxy.get_axioms()) { - const EffectsProxy effects = axiom.get_effects(); - for (EffectProxy effect : effects) { - const FactProxy effect_fact = effect.get_fact(); - operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( - get_operator_or_axiom_id(axiom)); - } - } -} - void add_landmark_factory_options_to_feature(plugins::Feature &feature) { utils::add_log_options_to_feature(feature); } -tuple -get_landmark_factory_arguments_from_options( +tuple get_landmark_factory_arguments_from_options( const plugins::Options &opts) { return utils::get_log_arguments_from_options(opts); } diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index d94564e38d..df89359391 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -17,29 +17,20 @@ class Feature; } namespace landmarks { -/* - TODO: Change order to private -> protected -> public - (omitted so far to minimize diff) -*/ class LandmarkFactory { -public: - virtual ~LandmarkFactory() = default; - LandmarkFactory(const LandmarkFactory &) = delete; - - std::shared_ptr compute_landmark_graph(const std::shared_ptr &task); - - virtual bool supports_conditional_effects() const = 0; + AbstractTask 
*landmark_graph_task; + std::vector>> operators_eff_lookup; - bool achievers_are_calculated() const { - return achievers_calculated; - } + virtual void generate_landmarks(const std::shared_ptr &task) = 0; + void generate_operators_lookups(const TaskProxy &task_proxy); protected: - explicit LandmarkFactory(utils::Verbosity verbosity); mutable utils::LogProxy log; std::shared_ptr landmark_graph; bool achievers_calculated = false; + explicit LandmarkFactory(utils::Verbosity verbosity); + void add_ordering(LandmarkNode &from, LandmarkNode &to, OrderingType type); void discard_all_orderings(); @@ -51,12 +42,17 @@ class LandmarkFactory { return operators_eff_lookup[eff.var][eff.value]; } -private: - AbstractTask *landmark_graph_task; - std::vector>> operators_eff_lookup; +public: + virtual ~LandmarkFactory() = default; + LandmarkFactory(const LandmarkFactory &) = delete; - virtual void generate_landmarks(const std::shared_ptr &task) = 0; - void generate_operators_lookups(const TaskProxy &task_proxy); + std::shared_ptr compute_landmark_graph(const std::shared_ptr &task); + + virtual bool supports_conditional_effects() const = 0; + + bool achievers_are_calculated() const { + return achievers_calculated; + } }; extern void add_landmark_factory_options_to_feature(plugins::Feature &feature); From 891bb4d4fc09d2eca5093fed610509fb783af266 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 17:46:17 +0100 Subject: [PATCH 023/108] Clean up landmark factory base class header file. 
--- src/search/landmarks/landmark_factory.cc | 33 +++++++------------ src/search/landmarks/landmark_factory.h | 20 +++++------ src/search/landmarks/landmark_factory_h_m.cc | 6 +--- src/search/landmarks/landmark_factory_h_m.h | 1 + .../landmark_factory_reasonable_orders_hps.cc | 3 +- .../landmarks/landmark_factory_relaxation.cc | 2 +- .../landmarks/landmark_factory_rpg_sasp.cc | 11 ++++--- .../landmarks/landmark_factory_zhu_givan.cc | 2 ++ 8 files changed, 34 insertions(+), 44 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 326ac168c2..c8f8cd151a 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -8,34 +8,36 @@ #include "../plugins/plugin.h" #include "../utils/logging.h" -#include "../utils/memory.h" #include "../utils/timer.h" #include -#include +#include + using namespace std; namespace landmarks { LandmarkFactory::LandmarkFactory(utils::Verbosity verbosity) - : log(utils::get_log_for_verbosity(verbosity)), landmark_graph(nullptr) { + : log(get_log_for_verbosity(verbosity)), landmark_graph(nullptr) { } -void LandmarkFactory::generate_operators_lookups(const TaskProxy &task_proxy) { +void LandmarkFactory::compute_operators_providing_effect( + const TaskProxy &task_proxy) { + // TODO: Update comment. /* Build datastructures for efficient landmark computation. 
Map propositions to the operators that achieve them or have them as preconditions */ VariablesProxy variables = task_proxy.get_variables(); - operators_eff_lookup.resize(variables.size()); + operators_providing_effect.resize(variables.size()); for (VariableProxy var : variables) { - operators_eff_lookup[var.get_id()].resize(var.get_domain_size()); + operators_providing_effect[var.get_id()].resize(var.get_domain_size()); } OperatorsProxy operators = task_proxy.get_operators(); for (OperatorProxy op : operators) { const EffectsProxy effects = op.get_effects(); for (EffectProxy effect : effects) { const FactProxy effect_fact = effect.get_fact(); - operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( + operators_providing_effect[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( get_operator_or_axiom_id(op)); } } @@ -43,7 +45,7 @@ void LandmarkFactory::generate_operators_lookups(const TaskProxy &task_proxy) { const EffectsProxy effects = axiom.get_effects(); for (EffectProxy effect : effects) { const FactProxy effect_fact = effect.get_fact(); - operators_eff_lookup[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( + operators_providing_effect[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( get_operator_or_axiom_id(axiom)); } } @@ -88,19 +90,6 @@ void LandmarkFactory::discard_all_orderings() { } } -bool LandmarkFactory::is_landmark_precondition( - const OperatorProxy &op, const Landmark &landmark) const { - /* Test whether the landmark is used by the operator as a precondition. - A disjunctive landmarks is used if one of its disjuncts is used. 
*/ - for (FactProxy pre : op.get_preconditions()) { - for (const FactPair &atom : landmark.atoms) { - if (pre.get_pair() == atom) - return true; - } - } - return false; -} - /* TODO: Update this comment @@ -139,7 +128,7 @@ shared_ptr LandmarkFactory::compute_landmark_graph( landmark_graph = make_shared(); TaskProxy task_proxy(*task); - generate_operators_lookups(task_proxy); + compute_operators_providing_effect(task_proxy); generate_landmarks(task); if (log.is_at_least_normal()) { diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index df89359391..119caef178 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -5,8 +5,6 @@ #include "../utils/logging.h" -#include -#include #include class TaskProxy; @@ -19,10 +17,11 @@ class Feature; namespace landmarks { class LandmarkFactory { AbstractTask *landmark_graph_task; - std::vector>> operators_eff_lookup; + std::vector>> operators_providing_effect; - virtual void generate_landmarks(const std::shared_ptr &task) = 0; - void generate_operators_lookups(const TaskProxy &task_proxy); + virtual void generate_landmarks( + const std::shared_ptr &task) = 0; + void compute_operators_providing_effect(const TaskProxy &task_proxy); protected: mutable utils::LogProxy log; @@ -35,18 +34,17 @@ class LandmarkFactory { void discard_all_orderings(); - bool is_landmark_precondition(const OperatorProxy &op, - const Landmark &landmark) const; - - const std::vector &get_operators_including_eff(const FactPair &eff) const { - return operators_eff_lookup[eff.var][eff.value]; + const std::vector &get_operators_including_effect( + const FactPair &eff) const { + return operators_providing_effect[eff.var][eff.value]; } public: virtual ~LandmarkFactory() = default; LandmarkFactory(const LandmarkFactory &) = delete; - std::shared_ptr compute_landmark_graph(const std::shared_ptr &task); + std::shared_ptr compute_landmark_graph( + const std::shared_ptr &task); 
virtual bool supports_conditional_effects() const = 0; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 86eee4bb51..da86db0b1c 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -609,10 +609,6 @@ void LandmarkFactoryHM::postprocess(const TaskProxy &task_proxy) { if (!conjunctive_landmarks) discard_conjunctive_landmarks(); landmark_graph->set_landmark_ids(); - - if (!use_orders) - discard_all_orderings(); - calc_achievers(task_proxy); } @@ -642,7 +638,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { set candidates; // put all possible adders in candidates set for (const FactPair &atom : landmark.atoms) { - const vector &ops = get_operators_including_eff(atom); + const vector &ops = get_operators_including_effect(atom); candidates.insert(ops.begin(), ops.end()); } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 486337b48c..f3b646c088 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -4,6 +4,7 @@ #include "landmark_factory.h" #include +#include namespace landmarks { using FluentSet = std::vector; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index ad5cd2f7d7..a88b96c23b 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -10,6 +10,7 @@ #include "../utils/markup.h" #include +#include #include using namespace std; @@ -143,7 +144,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( unordered_map shared_eff; bool init = true; const vector &op_or_axiom_ids = - get_operators_including_eff(atom_a); + get_operators_including_effect(atom_a); // Intersect operators that achieve a one by one for (int op_or_axiom_id : 
op_or_axiom_ids) { // If no shared effect among previous operators, break diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 7919cc6db4..6b1407df81 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -33,7 +33,7 @@ void LandmarkFactoryRelaxation::calc_achievers( for (const auto &node : *landmark_graph) { Landmark &landmark = node->get_landmark(); for (const FactPair &atom : landmark.atoms) { - const vector &ops = get_operators_including_eff(atom); + const vector &ops = get_operators_including_effect(atom); landmark.possible_achievers.insert(ops.begin(), ops.end()); if (variables[atom.var].is_derived()) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index f703677373..fd7e00148a 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -12,6 +12,7 @@ #include "../utils/system.h" #include +#include using namespace std; using utils::ExitCode; @@ -264,7 +265,7 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( */ bool init = true; for (const FactPair &atom : landmark.atoms) { - const vector &op_ids = get_operators_including_eff(atom); + const vector &op_ids = get_operators_including_effect(atom); for (int op_or_axiom_id : op_ids) { OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); @@ -367,7 +368,7 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( vector op_or_axiom_ids; for (const FactPair &atom : landmark.atoms) { const vector &tmp_op_or_axiom_ids = - get_operators_including_eff(atom); + get_operators_including_effect(atom); for (int op_or_axiom_id : tmp_op_or_axiom_ids) op_or_axiom_ids.push_back(op_or_axiom_id); } @@ -482,6 +483,8 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( discard_disjunctive_landmarks(); } + /* TODO: Ensure that 
landmark orderings are not even added if + `use_orders` is false. */ if (!use_orders) { discard_all_orderings(); } @@ -596,9 +599,9 @@ void LandmarkFactoryRpgSasp::find_forward_orders( and (var, value) at the same time. */ bool intersection_empty = true; const vector &atom_achievers = - get_operators_including_eff(atom); + get_operators_including_effect(atom); const vector &landmark_achievers = - get_operators_including_eff(landmark_atom); + get_operators_including_effect(landmark_atom); for (size_t j = 0; j < atom_achievers.size() && intersection_empty; ++j) for (size_t k = 0; k < landmark_achievers.size() && intersection_empty; ++k) diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 8f3a001925..d7b230cc0d 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -35,6 +35,8 @@ void LandmarkFactoryZhuGivan::generate_relaxed_landmarks( extract_landmarks(task_proxy, last_prop_layer); + /* TODO: Ensure that landmark orderings are not even added if + `use_orders` is false. */ if (!use_orders) { discard_all_orderings(); } From b6038e00c9c6df0f5f7678d428945c6aa5a38941 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 20:13:54 +0100 Subject: [PATCH 024/108] Clean up landmark_factory.cc. 
--- src/search/landmarks/landmark_factory.cc | 173 ++++++++++-------- src/search/landmarks/landmark_factory.h | 15 +- src/search/landmarks/landmark_factory_h_m.cc | 10 +- .../landmarks/landmark_factory_merged.cc | 3 +- .../landmark_factory_reasonable_orders_hps.cc | 6 +- .../landmarks/landmark_factory_rpg_sasp.cc | 16 +- .../landmarks/landmark_factory_zhu_givan.cc | 3 +- 7 files changed, 137 insertions(+), 89 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index c8f8cd151a..db9a91a555 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -21,66 +21,87 @@ LandmarkFactory::LandmarkFactory(utils::Verbosity verbosity) : log(get_log_for_verbosity(verbosity)), landmark_graph(nullptr) { } -void LandmarkFactory::compute_operators_providing_effect( +void LandmarkFactory::resize_operators_providing_effect( const TaskProxy &task_proxy) { - // TODO: Update comment. - /* Build datastructures for efficient landmark computation. 
Map propositions - to the operators that achieve them or have them as preconditions */ - VariablesProxy variables = task_proxy.get_variables(); operators_providing_effect.resize(variables.size()); - for (VariableProxy var : variables) { + for (const VariableProxy &var : variables) { operators_providing_effect[var.get_id()].resize(var.get_domain_size()); } - OperatorsProxy operators = task_proxy.get_operators(); - for (OperatorProxy op : operators) { - const EffectsProxy effects = op.get_effects(); - for (EffectProxy effect : effects) { - const FactProxy effect_fact = effect.get_fact(); - operators_providing_effect[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( - get_operator_or_axiom_id(op)); - } +} + +void LandmarkFactory::add_operator_or_axiom_providing_effects( + const OperatorProxy &op_or_axiom) { + EffectsProxy effects = op_or_axiom.get_effects(); + for (EffectProxy effect : effects) { + auto [var, value] = effect.get_fact().get_pair(); + operators_providing_effect[var][value].push_back( + get_operator_or_axiom_id(op_or_axiom)); } - for (OperatorProxy axiom : task_proxy.get_axioms()) { - const EffectsProxy effects = axiom.get_effects(); - for (EffectProxy effect : effects) { - const FactProxy effect_fact = effect.get_fact(); - operators_providing_effect[effect_fact.get_variable().get_id()][effect_fact.get_value()].push_back( - get_operator_or_axiom_id(axiom)); - } +} + +/* Build datastructures for efficient landmark computation: Map propositions to + the operators and axioms that achieve them. 
*/ +void LandmarkFactory::compute_operators_providing_effect( + const TaskProxy &task_proxy) { + resize_operators_providing_effect(task_proxy); + for (const OperatorProxy &op : task_proxy.get_operators()) { + add_operator_or_axiom_providing_effects(op); + } + for (const OperatorProxy &axiom : task_proxy.get_axioms()) { + add_operator_or_axiom_providing_effects(axiom); } } -void LandmarkFactory::add_ordering(LandmarkNode &from, LandmarkNode &to, - OrderingType type) { - /* Adds an ordering in the landmarks graph. If an ordering between the same - landmarks is already present, the stronger ordering type wins. */ - assert(&from != &to); +static bool weaker_ordering_exists( + LandmarkNode &from, LandmarkNode &to, OrderingType type) { + auto it = from.children.find(&to); + if (it == from.children.end()) { + return false; + } else { + return it->second < type; + } +} - // If ordering already exists, remove if weaker. - if (from.children.find(&to) != from.children.end() && from.children.find( - &to)->second < type) { - from.children.erase(&to); - assert(to.parents.find(&from) != to.parents.end()); - to.parents.erase(&from); +static void remove_weaker_ordering(LandmarkNode &from, LandmarkNode &to) { + from.children.erase(&to); + assert(to.parents.find(&from) != to.parents.end()); + to.parents.erase(&from); - assert(to.parents.find(&from) == to.parents.end()); - assert(from.children.find(&to) == from.children.end()); - } - // If ordering does not exist (or has just been removed), insert. 
- if (from.children.find(&to) == from.children.end()) { - assert(to.parents.find(&from) == to.parents.end()); + assert(to.parents.find(&from) == to.parents.end()); + assert(from.children.find(&to) == from.children.end()); +} + +void LandmarkFactory::add_ordering( + LandmarkNode &from, LandmarkNode &to, OrderingType type) const { + if (from.children.contains(&to)) { + assert(to.parents.contains(&from)); from.children.emplace(&to, type); to.parents.emplace(&from, type); if (log.is_at_least_debug()) { log << "added parent with address " << &from << endl; } } - assert(from.children.find(&to) != from.children.end()); - assert(to.parents.find(&from) != to.parents.end()); } -void LandmarkFactory::discard_all_orderings() { +/* Adds an ordering in the landmark graph. If an ordering between the same + landmarks is already present, the stronger ordering type wins. */ +void LandmarkFactory::add_ordering_or_replace_if_stronger( + LandmarkNode &from, LandmarkNode &to, OrderingType type) const { + // TODO: Understand why self-loops are not allowed. + assert(&from != &to); + + if (weaker_ordering_exists(from, to, type)) { + remove_weaker_ordering(from, to); + } + if (!from.children.contains(&to)) { + add_ordering(from, to, type); + } + assert(from.children.contains(&to)); + assert(to.parents.contains(&from)); +} + +void LandmarkFactory::discard_all_orderings() const { if (log.is_at_least_normal()) { log << "Removing all orderings." << endl; } @@ -90,26 +111,51 @@ void LandmarkFactory::discard_all_orderings() { } } -/* - TODO: Update this comment +void LandmarkFactory::log_landmark_graph_info( + const TaskProxy &task_proxy, + const utils::Timer &landmark_generation_timer) const { + if (log.is_at_least_normal()) { + log << "Landmarks generation time: " + << landmark_generation_timer << endl; + if (landmark_graph->get_num_landmarks() == 0) { + if (log.is_warning()) { + log << "Warning! No landmarks found. Task unsolvable?" 
<< endl; + } + } else { + log << "Discovered " << landmark_graph->get_num_landmarks() + << " landmarks, of which " + << landmark_graph->get_num_disjunctive_landmarks() + << " are disjunctive and " + << landmark_graph->get_num_conjunctive_landmarks() + << " are conjunctive.\nThere are " + << landmark_graph->get_num_orderings() + << " landmark orderings." << endl; + } + } + if (log.is_at_least_debug()) { + dump_landmark_graph(task_proxy, *landmark_graph, log); + } +} + +/* Note: To allow reusing landmark graphs, we use the following temporary solution. - Landmark factories cache the first landmark graph they compute, so - each call to this function returns the same graph. Asking for landmark graphs - of different tasks is an error and will exit with SEARCH_UNSUPPORTED. + Landmark factories cache the first landmark graph they compute, so each call + to this function returns the same graph. Asking for landmark graphs of + different tasks is an error and will exit with SEARCH_UNSUPPORTED. - If you want to compute different landmark graphs for different - Exploration objects, you have to use separate landmark factories. + If you want to compute different landmark graphs for different Exploration + objects, you have to use separate landmark factories. - This solution remains temporary as long as the question of when and - how to reuse landmark graphs is open. + This solution remains temporary as long as the question of when and how to + reuse landmark graphs is open. - As all heuristics will work on task transformations in the future, - this function will also get access to a TaskProxy. Then we need to - ensure that the TaskProxy used by the Exploration object is the same - as the TaskProxy object passed to this function. + As all heuristics will work on task transformations in the future, this + function will also get access to a TaskProxy. 
Then we need to ensure that the + TaskProxy used by the Exploration object is the same as the TaskProxy object + passed to this function. */ shared_ptr LandmarkFactory::compute_landmark_graph( const shared_ptr &task) { @@ -131,24 +177,7 @@ shared_ptr LandmarkFactory::compute_landmark_graph( compute_operators_providing_effect(task_proxy); generate_landmarks(task); - if (log.is_at_least_normal()) { - log << "Landmarks generation time: " << landmark_generation_timer << endl; - if (landmark_graph->get_num_landmarks() == 0) { - if (log.is_warning()) { - log << "Warning! No landmarks found. Task unsolvable?" << endl; - } - } else { - log << "Discovered " << landmark_graph->get_num_landmarks() - << " landmarks, of which " << landmark_graph->get_num_disjunctive_landmarks() - << " are disjunctive and " - << landmark_graph->get_num_conjunctive_landmarks() << " are conjunctive." << endl; - log << landmark_graph->get_num_orderings() << " orderings" << endl; - } - } - - if (log.is_at_least_debug()) { - dump_landmark_graph(task_proxy, *landmark_graph, log); - } + log_landmark_graph_info(task_proxy, landmark_generation_timer); return landmark_graph; } diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index 119caef178..67ae4fb984 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -21,8 +21,18 @@ class LandmarkFactory { virtual void generate_landmarks( const std::shared_ptr &task) = 0; + void log_landmark_graph_info( + const TaskProxy &task_proxy, + const utils::Timer &landmark_generation_timer) const; + + void resize_operators_providing_effect(const TaskProxy &task_proxy); + void add_operator_or_axiom_providing_effects( + const OperatorProxy &op_or_axiom); void compute_operators_providing_effect(const TaskProxy &task_proxy); + void add_ordering( + LandmarkNode &from, LandmarkNode &to, OrderingType type) const; + protected: mutable utils::LogProxy log; std::shared_ptr landmark_graph; @@ 
-30,9 +40,10 @@ class LandmarkFactory { explicit LandmarkFactory(utils::Verbosity verbosity); - void add_ordering(LandmarkNode &from, LandmarkNode &to, OrderingType type); + void add_ordering_or_replace_if_stronger( + LandmarkNode &from, LandmarkNode &to, OrderingType type) const; - void discard_all_orderings(); + void discard_all_orderings() const; const std::vector &get_operators_including_effect( const FactPair &eff) const { diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index da86db0b1c..827ea207af 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -995,12 +995,14 @@ void LandmarkFactoryHM::generate_landmarks( assert(landmark_node_table.find(landmark) != landmark_node_table.end()); assert(landmark_node_table.find(set_index) != landmark_node_table.end()); - add_ordering(*landmark_node_table[landmark], *landmark_node_table[set_index], - OrderingType::NATURAL); + add_ordering_or_replace_if_stronger( + *landmark_node_table[landmark], + *landmark_node_table[set_index], OrderingType::NATURAL); } for (int gn : h_m_table_[set_index].necessary) { - add_ordering(*landmark_node_table[gn], *landmark_node_table[set_index], - OrderingType::GREEDY_NECESSARY); + add_ordering_or_replace_if_stronger( + *landmark_node_table[gn], *landmark_node_table[set_index], + OrderingType::GREEDY_NECESSARY); } } } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index caef6b75d0..a38cd84ad8 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -119,7 +119,8 @@ void LandmarkFactoryMerged::generate_landmarks( OrderingType type = to.second; LandmarkNode *to_node = get_matching_landmark(to_orig->get_landmark()); if (to_node) { - add_ordering(*from, *to_node, type); + add_ordering_or_replace_if_stronger( + *from, *to_node, type); } else { if 
(log.is_at_least_normal()) { log << "Discarded to ordering" << endl; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index a88b96c23b..89ede1c82c 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -63,7 +63,8 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { - add_ordering(*node2_p, *node_p, OrderingType::REASONABLE); + add_ordering_or_replace_if_stronger( + *node2_p, *node_p, OrderingType::REASONABLE); } } } else { @@ -97,7 +98,8 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( if (landmark == landmark2 || landmark2.is_disjunctive) continue; if (interferes(task_proxy, landmark2, landmark)) { - add_ordering(*node2_p, *node_p, OrderingType::REASONABLE); + add_ordering_or_replace_if_stronger( + *node2_p, *node_p, OrderingType::REASONABLE); } } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index fd7e00148a..247e6a5db8 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -164,7 +164,7 @@ void LandmarkFactoryRpgSasp::found_simple_landmark_and_ordering( if (landmark_graph->contains_simple_landmark(atom)) { LandmarkNode &simple_landmark = landmark_graph->get_simple_landmark_node(atom); - add_ordering(simple_landmark, node, type); + add_ordering_or_replace_if_stronger(simple_landmark, node, type); return; } @@ -204,18 +204,19 @@ void LandmarkFactoryRpgSasp::found_simple_landmark_and_ordering( LandmarkNode &simple_landmark = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_landmark); - add_ordering(simple_landmark, node, type); + 
add_ordering_or_replace_if_stronger(simple_landmark, node, type); /* Add incoming orderings of replaced `disjunctive_landmark_node` as natural orderings to `simple_landmark`. */ for (LandmarkNode *pred : predecessors) { - add_ordering(*pred, simple_landmark, OrderingType::NATURAL); + add_ordering_or_replace_if_stronger( + *pred, simple_landmark, OrderingType::NATURAL); } } else { LandmarkNode &simple_landmark = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_landmark); - add_ordering(simple_landmark, node, type); + add_ordering_or_replace_if_stronger(simple_landmark, node, type); } } @@ -241,7 +242,7 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { new_landmark_node = &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); - add_ordering(*new_landmark_node, node, type); + add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); return; } // Landmark overlaps with existing disjunctive landmark, do not add. 
@@ -253,7 +254,7 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( atoms.end()), true, false); new_landmark_node = &landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(new_landmark_node); - add_ordering(*new_landmark_node, node, type); + add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); } void LandmarkFactoryRpgSasp::compute_shared_preconditions( @@ -629,7 +630,8 @@ void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { if (landmark_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = landmark_graph->get_simple_landmark_node(node2_pair); - add_ordering(*node, node2, OrderingType::NATURAL); + add_ordering_or_replace_if_stronger( + *node, node2, OrderingType::NATURAL); } } forward_orders[node.get()].clear(); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index d7b230cc0d..15659a14e9 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -96,7 +96,8 @@ void LandmarkFactoryZhuGivan::extract_landmarks( // Add order: lm ->_{nat} lm assert(node->parents.find(lm_node) == node->parents.end()); assert(lm_node->children.find(node) == lm_node->children.end()); - add_ordering(*node, *lm_node, OrderingType::NATURAL); + add_ordering_or_replace_if_stronger( + *node, *lm_node, OrderingType::NATURAL); } } } From 059a6b29465ede069466067489456f0b9e80c3af Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 20:05:30 +0100 Subject: [PATCH 025/108] Fix failing assertion. 
--- src/search/landmarks/landmark_factory.cc | 25 ++++++++++++------------ 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index db9a91a555..43d1a5d7b3 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -63,24 +63,23 @@ static bool weaker_ordering_exists( } } -static void remove_weaker_ordering(LandmarkNode &from, LandmarkNode &to) { +static void remove_ordering(LandmarkNode &from, LandmarkNode &to) { + assert(from.children.contains(&to)); + assert(to.parents.contains(&from)); from.children.erase(&to); - assert(to.parents.find(&from) != to.parents.end()); to.parents.erase(&from); - - assert(to.parents.find(&from) == to.parents.end()); - assert(from.children.find(&to) == from.children.end()); + assert(!to.parents.contains(&from)); + assert(!from.children.contains(&to)); } void LandmarkFactory::add_ordering( LandmarkNode &from, LandmarkNode &to, OrderingType type) const { - if (from.children.contains(&to)) { - assert(to.parents.contains(&from)); - from.children.emplace(&to, type); - to.parents.emplace(&from, type); - if (log.is_at_least_debug()) { - log << "added parent with address " << &from << endl; - } + assert(!from.children.contains(&to)); + assert(!to.parents.contains(&from)); + from.children.emplace(&to, type); + to.parents.emplace(&from, type); + if (log.is_at_least_debug()) { + log << "added parent with address " << &from << endl; } } @@ -92,7 +91,7 @@ void LandmarkFactory::add_ordering_or_replace_if_stronger( assert(&from != &to); if (weaker_ordering_exists(from, to, type)) { - remove_weaker_ordering(from, to); + remove_ordering(from, to); } if (!from.children.contains(&to)) { add_ordering(from, to, type); From 2a91937c250200c6de3f026f8c21e0a9056a3898 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 20:45:33 +0100 Subject: [PATCH 026/108] Clean up hm-factory header file. 
--- src/search/landmarks/landmark_factory_h_m.cc | 252 +++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 138 +++++----- 2 files changed, 203 insertions(+), 187 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 827ea207af..8548108123 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -107,7 +107,7 @@ static bool contains(list &alist, const T &val) { // find partial variable assignments with size m or less // (look at all the variables in the problem) -void LandmarkFactoryHM::get_m_sets_(const VariablesProxy &variables, int m, int num_included, int current_var, +void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, int num_included, int current_var, FluentSet ¤t, vector &subsets) { int num_variables = variables.size(); @@ -134,12 +134,12 @@ void LandmarkFactoryHM::get_m_sets_(const VariablesProxy &variables, int m, int if (use_var) { current.push_back(current_var_fact); - get_m_sets_(variables, m, num_included + 1, current_var + 1, current, subsets); + get_m_sets(variables, m, num_included + 1, current_var + 1, current, subsets); current.pop_back(); } } // don't include a value of current_var in the set - get_m_sets_(variables, m, num_included, current_var + 1, current, subsets); + get_m_sets(variables, m, num_included, current_var + 1, current, subsets); } // find all size m or less subsets of superset @@ -263,7 +263,7 @@ void LandmarkFactoryHM::get_split_m_sets( // get partial assignments of size <= m in the problem void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, vector &subsets) { FluentSet c; - get_m_sets_(variables, m, 0, 0, c, subsets); + get_m_sets(variables, m, 0, 0, c, subsets); } // get subsets of superset with size <= m @@ -335,33 +335,33 @@ static FluentSet get_operator_postcondition(int num_vars, const OperatorProxy &o } -void 
LandmarkFactoryHM::print_pm_op(const VariablesProxy &variables, const PMOp &op) const { +void LandmarkFactoryHM::print_pm_operator(const VariablesProxy &variables, const PiMOperator &op) const { if (log.is_at_least_verbose()) { set pcs, effs, cond_pc, cond_eff; vector, set>> conds; - for (int pc : op.pc) { - for (const FactPair &fluent : h_m_table_[pc].fluents) { + for (int pc : op.precondition) { + for (const FactPair &fluent : hm_table[pc].fluents) { pcs.insert(fluent); } } - for (int eff : op.eff) { - for (const FactPair &fluent : h_m_table_[eff].fluents) { + for (int eff : op.effect) { + for (const FactPair &fluent : hm_table[eff].fluents) { effs.insert(fluent); } } - for (size_t i = 0; i < op.cond_noops.size(); ++i) { + for (size_t i = 0; i < op.conditional_noops.size(); ++i) { cond_pc.clear(); cond_eff.clear(); int pm_fluent; size_t j; log << "PC:" << endl; - for (j = 0; (pm_fluent = op.cond_noops[i][j]) != -1; ++j) { - print_fluentset(variables, h_m_table_[pm_fluent].fluents); + for (j = 0; (pm_fluent = op.conditional_noops[i][j]) != -1; ++j) { + print_fluent_set(variables, hm_table[pm_fluent].fluents); log << endl; - for (size_t k = 0; k < h_m_table_[pm_fluent].fluents.size(); ++k) { - cond_pc.insert(h_m_table_[pm_fluent].fluents[k]); + for (size_t k = 0; k < hm_table[pm_fluent].fluents.size(); ++k) { + cond_pc.insert(hm_table[pm_fluent].fluents[k]); } } // advance to effects section @@ -369,14 +369,14 @@ void LandmarkFactoryHM::print_pm_op(const VariablesProxy &variables, const PMOp ++j; log << "EFF:" << endl; - for (; j < op.cond_noops[i].size(); ++j) { - int pm_fluent = op.cond_noops[i][j]; + for (; j < op.conditional_noops[i].size(); ++j) { + int pm_fluent = op.conditional_noops[i][j]; - print_fluentset(variables, h_m_table_[pm_fluent].fluents); + print_fluent_set(variables, hm_table[pm_fluent].fluents); log << endl; - for (size_t k = 0; k < h_m_table_[pm_fluent].fluents.size(); ++k) { - cond_eff.insert(h_m_table_[pm_fluent].fluents[k]); + for (size_t k 
= 0; k < hm_table[pm_fluent].fluents.size(); ++k) { + cond_eff.insert(hm_table[pm_fluent].fluents[k]); } } conds.emplace_back(cond_pc, cond_eff); @@ -413,7 +413,7 @@ void LandmarkFactoryHM::print_pm_op(const VariablesProxy &variables, const PMOp } } -void LandmarkFactoryHM::print_fluentset(const VariablesProxy &variables, const FluentSet &fs) const { +void LandmarkFactoryHM::print_fluent_set(const VariablesProxy &variables, const FluentSet &fs) const { if (log.is_at_least_verbose()) { log << "( "; for (const FactPair &fact : fs) { @@ -455,7 +455,7 @@ bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, // make the operators of the P_m problem -void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { +void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { FluentSet pc, eff; vector pc_subsets, eff_subsets, noop_pc_subsets, noop_eff_subsets; @@ -463,17 +463,17 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { int set_index, noop_index; OperatorsProxy operators = task_proxy.get_operators(); - pm_ops_.resize(operators.size()); + pm_operators.resize(operators.size()); // set unsatisfied precondition counts, used in fixpoint calculation - unsat_pc_count_.resize(operators.size()); + unsatisfied_precondition_count.resize(operators.size()); VariablesProxy variables = task_proxy.get_variables(); // transfer ops from original problem // represent noops as "conditional" effects for (OperatorProxy op : operators) { - PMOp &pm_op = pm_ops_[op.get_id()]; + PiMOperator &pm_op = pm_operators[op.get_id()]; pm_op.index = op_count++; pc_subsets.clear(); @@ -481,28 +481,28 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { // preconditions of P_m op are all subsets of original pc pc = get_operator_precondition(op); - get_m_sets(variables, m_, pc_subsets, pc); - pm_op.pc.reserve(pc_subsets.size()); + get_m_sets(variables, m, pc_subsets, pc); + pm_op.precondition.reserve(pc_subsets.size()); // set 
unsatisfied pc count for op - unsat_pc_count_[op.get_id()].first = pc_subsets.size(); + unsatisfied_precondition_count[op.get_id()].first = pc_subsets.size(); for (const FluentSet &pc_subset : pc_subsets) { - assert(set_indices_.find(pc_subset) != set_indices_.end()); - set_index = set_indices_[pc_subset]; - pm_op.pc.push_back(set_index); - h_m_table_[set_index].pc_for.emplace_back(op.get_id(), -1); + assert(set_indices.find(pc_subset) != set_indices.end()); + set_index = set_indices[pc_subset]; + pm_op.precondition.push_back(set_index); + hm_table[set_index].pc_for.emplace_back(op.get_id(), -1); } // same for effects eff = get_operator_postcondition(variables.size(), op); - get_m_sets(variables, m_, eff_subsets, eff); - pm_op.eff.reserve(eff_subsets.size()); + get_m_sets(variables, m, eff_subsets, eff); + pm_op.effect.reserve(eff_subsets.size()); for (const FluentSet &eff_subset : eff_subsets) { - assert(set_indices_.find(eff_subset) != set_indices_.end()); - set_index = set_indices_[eff_subset]; - pm_op.eff.push_back(set_index); + assert(set_indices.find(eff_subset) != set_indices.end()); + set_index = set_indices[eff_subset]; + pm_op.effect.push_back(set_index); } noop_index = 0; @@ -511,14 +511,14 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { // they conflict with the effect of the operator (no need to check pc // because mvvs appearing in pc also appear in effect - FluentSetToIntMap::const_iterator it = set_indices_.begin(); - while (static_cast(it->first.size()) < m_ - && it != set_indices_.end()) { + FluentSetToIntMap::const_iterator it = set_indices.begin(); + while (static_cast(it->first.size()) < m + && it != set_indices.end()) { if (possible_noop_set(variables, eff, it->first)) { // for each such set, add a "conditional effect" to the operator - pm_op.cond_noops.resize(pm_op.cond_noops.size() + 1); + pm_op.conditional_noops.resize(pm_op.conditional_noops.size() + 1); - vector &this_cond_noop = pm_op.cond_noops.back(); + vector 
&this_cond_noop = pm_op.conditional_noops.back(); noop_pc_subsets.clear(); noop_eff_subsets.clear(); @@ -526,22 +526,22 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { // get the subsets that have >= 1 element in the pc (unless pc is empty) // and >= 1 element in the other set - get_split_m_sets(variables, m_, noop_pc_subsets, pc, it->first); - get_split_m_sets(variables, m_, noop_eff_subsets, eff, it->first); + get_split_m_sets(variables, m, noop_pc_subsets, pc, it->first); + get_split_m_sets(variables, m, noop_eff_subsets, eff, it->first); this_cond_noop.reserve(noop_pc_subsets.size() + noop_eff_subsets.size() + 1); - unsat_pc_count_[op.get_id()].second.push_back(noop_pc_subsets.size()); + unsatisfied_precondition_count[op.get_id()].second.push_back(noop_pc_subsets.size()); // push back all noop preconditions for (size_t j = 0; j < noop_pc_subsets.size(); ++j) { - assert(static_cast(noop_pc_subsets[j].size()) <= m_); - assert(set_indices_.find(noop_pc_subsets[j]) != set_indices_.end()); + assert(static_cast(noop_pc_subsets[j].size()) <= m); + assert(set_indices.find(noop_pc_subsets[j]) != set_indices.end()); - set_index = set_indices_[noop_pc_subsets[j]]; + set_index = set_indices[noop_pc_subsets[j]]; this_cond_noop.push_back(set_index); // these facts are "conditional pcs" for this action - h_m_table_[set_index].pc_for.emplace_back(op.get_id(), noop_index); + hm_table[set_index].pc_for.emplace_back(op.get_id(), noop_index); } // separator @@ -549,10 +549,10 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { // and the noop effects for (size_t j = 0; j < noop_eff_subsets.size(); ++j) { - assert(static_cast(noop_eff_subsets[j].size()) <= m_); - assert(set_indices_.find(noop_eff_subsets[j]) != set_indices_.end()); + assert(static_cast(noop_eff_subsets[j].size()) <= m); + assert(set_indices.find(noop_eff_subsets[j]) != set_indices.end()); - set_index = set_indices_[noop_eff_subsets[j]]; + set_index = 
set_indices[noop_eff_subsets[j]]; this_cond_noop.push_back(set_index); } @@ -560,7 +560,7 @@ void LandmarkFactoryHM::build_pm_ops(const TaskProxy &task_proxy) { } ++it; } - print_pm_op(variables, pm_op); + print_pm_operator(variables, pm_op); } } @@ -575,14 +575,14 @@ LandmarkFactoryHM::LandmarkFactoryHM( int m, bool conjunctive_landmarks, bool use_orders, utils::Verbosity verbosity) : LandmarkFactory(verbosity), - m_(m), + m(m), conjunctive_landmarks(conjunctive_landmarks), use_orders(use_orders) { } void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { if (log.is_at_least_normal()) { - log << "h^m landmarks m=" << m_ << endl; + log << "h^m landmarks m=" << m << endl; } if (!task_proxy.get_axioms().empty()) { cerr << "h^m landmarks don't support axioms" << endl; @@ -590,19 +590,19 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { } // Get all the m or less size subsets in the domain. vector> msets; - get_m_sets(task_proxy.get_variables(), m_, msets); + get_m_sets(task_proxy.get_variables(), m, msets); // map each set to an integer for (size_t i = 0; i < msets.size(); ++i) { - h_m_table_.emplace_back(); - set_indices_[msets[i]] = i; - h_m_table_[i].fluents = msets[i]; + hm_table.emplace_back(); + set_indices[msets[i]] = i; + hm_table[i].fluents = msets[i]; } if (log.is_at_least_normal()) { - log << "Using " << h_m_table_.size() << " P^m fluents." << endl; + log << "Using " << hm_table.size() << " P^m fluents." 
<< endl; } - build_pm_ops(task_proxy); + build_pm_operators(task_proxy); } void LandmarkFactoryHM::postprocess(const TaskProxy &task_proxy) { @@ -685,28 +685,28 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::free_unneeded_memory() { - utils::release_vector_memory(h_m_table_); - utils::release_vector_memory(pm_ops_); - utils::release_vector_memory(unsat_pc_count_); + utils::release_vector_memory(hm_table); + utils::release_vector_memory(pm_operators); + utils::release_vector_memory(unsatisfied_precondition_count); - set_indices_.clear(); + set_indices.clear(); landmark_node_table.clear(); } // called when a fact is discovered or its landmarks change // to trigger required actions at next level // newly_discovered = first time fact becomes reachable -void LandmarkFactoryHM::propagate_pm_fact(int factindex, bool newly_discovered, +void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered, TriggerSet &trigger) { // for each action/noop for which fact is a pc - for (const FactPair &info : h_m_table_[factindex].pc_for) { + for (const FactPair &info : hm_table[atom_index].pc_for) { // a pc for the action itself if (info.value == -1) { if (newly_discovered) { - --unsat_pc_count_[info.var].first; + --unsatisfied_precondition_count[info.var].first; } // add to queue if unsatcount at 0 - if (unsat_pc_count_[info.var].first == 0) { + if (unsatisfied_precondition_count[info.var].first == 0) { // create empty set or clear prev entries -- signals do all possible noop effects trigger[info.var].clear(); } @@ -714,12 +714,12 @@ void LandmarkFactoryHM::propagate_pm_fact(int factindex, bool newly_discovered, // a pc for a conditional noop else { if (newly_discovered) { - --unsat_pc_count_[info.var].second[info.value]; + --unsatisfied_precondition_count[info.var].second[info.value]; } // if associated action is applicable, and effect has become applicable // (if associated action is not applicable, all noops 
will be used when it first does) - if ((unsat_pc_count_[info.var].first == 0) && - (unsat_pc_count_[info.var].second[info.value] == 0)) { + if ((unsatisfied_precondition_count[info.var].first == 0) && + (unsatisfied_precondition_count[info.var].second[info.value] == 0)) { // if not already triggering all noops, add this one if ((trigger.find(info.var) == trigger.end()) || (!trigger[info.var].empty())) { @@ -730,25 +730,25 @@ void LandmarkFactoryHM::propagate_pm_fact(int factindex, bool newly_discovered, } } -void LandmarkFactoryHM::compute_h_m_landmarks(const TaskProxy &task_proxy) { +void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { // get subsets of initial state vector init_subsets; - get_m_sets(task_proxy.get_variables(), m_, init_subsets, task_proxy.get_initial_state()); + get_m_sets(task_proxy.get_variables(), m, init_subsets, task_proxy.get_initial_state()); TriggerSet current_trigger, next_trigger; // for all of the initial state <= m subsets, mark level = 0 for (size_t i = 0; i < init_subsets.size(); ++i) { - int index = set_indices_[init_subsets[i]]; - h_m_table_[index].level = 0; + int index = set_indices[init_subsets[i]]; + hm_table[index].level = 0; // set actions to be applied - propagate_pm_fact(index, true, current_trigger); + propagate_pm_atoms(index, true, current_trigger); } // mark actions with no precondition to be applied - for (size_t i = 0; i < pm_ops_.size(); ++i) { - if (unsat_pc_count_[i].first == 0) { + for (size_t i = 0; i < pm_operators.size(); ++i) { + if (unsatisfied_precondition_count[i].first == 0) { // create empty set or clear prev entries current_trigger[i].clear(); } @@ -771,13 +771,13 @@ void LandmarkFactoryHM::compute_h_m_landmarks(const TaskProxy &task_proxy) { local_necessary.clear(); int op_index = op_it->first; - PMOp &action = pm_ops_[op_index]; + PiMOperator &action = pm_operators[op_index]; // gather landmarks for pcs // in the set of landmarks for each fact, the fact itself is not stored // 
(only landmarks preceding it) - for (it = action.pc.begin(); it != action.pc.end(); ++it) { - union_with(local_landmarks, h_m_table_[*it].landmarks); + for (it = action.precondition.begin(); it != action.precondition.end(); ++it) { + union_with(local_landmarks, hm_table[*it].landmarks); insert_into(local_landmarks, *it); if (use_orders) { @@ -785,42 +785,42 @@ void LandmarkFactoryHM::compute_h_m_landmarks(const TaskProxy &task_proxy) { } } - for (it = action.eff.begin(); it != action.eff.end(); ++it) { - if (h_m_table_[*it].level != -1) { - prev_size = h_m_table_[*it].landmarks.size(); - intersect_with(h_m_table_[*it].landmarks, local_landmarks); + for (it = action.effect.begin(); it != action.effect.end(); ++it) { + if (hm_table[*it].level != -1) { + prev_size = hm_table[*it].landmarks.size(); + intersect_with(hm_table[*it].landmarks, local_landmarks); // if the add effect appears in local landmarks, // fact is being achieved for >1st time // no need to intersect for gn orderings // or add op to first achievers if (!contains(local_landmarks, *it)) { - insert_into(h_m_table_[*it].first_achievers, op_index); + insert_into(hm_table[*it].first_achievers, op_index); if (use_orders) { - intersect_with(h_m_table_[*it].necessary, local_necessary); + intersect_with(hm_table[*it].necessary, local_necessary); } } - if (h_m_table_[*it].landmarks.size() != prev_size) - propagate_pm_fact(*it, false, next_trigger); + if (hm_table[*it].landmarks.size() != prev_size) + propagate_pm_atoms(*it, false, next_trigger); } else { - h_m_table_[*it].level = level; - h_m_table_[*it].landmarks = local_landmarks; + hm_table[*it].level = level; + hm_table[*it].landmarks = local_landmarks; if (use_orders) { - h_m_table_[*it].necessary = local_necessary; + hm_table[*it].necessary = local_necessary; } - insert_into(h_m_table_[*it].first_achievers, op_index); - propagate_pm_fact(*it, true, next_trigger); + insert_into(hm_table[*it].first_achievers, op_index); + propagate_pm_atoms(*it, true, 
next_trigger); } } // landmarks changed for action itself, have to recompute // landmarks for all noop effects if (op_it->second.empty()) { - for (size_t i = 0; i < action.cond_noops.size(); ++i) { + for (size_t i = 0; i < action.conditional_noops.size(); ++i) { // actions pcs are satisfied, but cond. effects may still have // unsatisfied pcs - if (unsat_pc_count_[op_index].second[i] == 0) { + if (unsatisfied_precondition_count[op_index].second[i] == 0) { compute_noop_landmarks(op_index, i, local_landmarks, local_necessary, @@ -833,7 +833,7 @@ void LandmarkFactoryHM::compute_h_m_landmarks(const TaskProxy &task_proxy) { else { for (set::iterator noop_it = op_it->second.begin(); noop_it != op_it->second.end(); ++noop_it) { - assert(unsat_pc_count_[op_index].second[*noop_it] == 0); + assert(unsatisfied_precondition_count[op_index].second[*noop_it] == 0); compute_noop_landmarks(op_index, *noop_it, local_landmarks, @@ -865,8 +865,8 @@ void LandmarkFactoryHM::compute_noop_landmarks( size_t prev_size; int pm_fluent; - PMOp &action = pm_ops_[op_index]; - vector &pc_eff_pair = action.cond_noops[noop_index]; + PiMOperator &action = pm_operators[op_index]; + vector &pc_eff_pair = action.conditional_noops[noop_index]; cn_landmarks.clear(); @@ -879,7 +879,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( size_t i; for (i = 0; (pm_fluent = pc_eff_pair[i]) != -1; ++i) { - union_with(cn_landmarks, h_m_table_[pm_fluent].landmarks); + union_with(cn_landmarks, hm_table[pm_fluent].landmarks); insert_into(cn_landmarks, pm_fluent); if (use_orders) { @@ -892,38 +892,38 @@ void LandmarkFactoryHM::compute_noop_landmarks( for (; i < pc_eff_pair.size(); ++i) { pm_fluent = pc_eff_pair[i]; - if (h_m_table_[pm_fluent].level != -1) { - prev_size = h_m_table_[pm_fluent].landmarks.size(); - intersect_with(h_m_table_[pm_fluent].landmarks, cn_landmarks); + if (hm_table[pm_fluent].level != -1) { + prev_size = hm_table[pm_fluent].landmarks.size(); + intersect_with(hm_table[pm_fluent].landmarks, 
cn_landmarks); // if the add effect appears in cn_landmarks, // fact is being achieved for >1st time // no need to intersect for gn orderings // or add op to first achievers if (!contains(cn_landmarks, pm_fluent)) { - insert_into(h_m_table_[pm_fluent].first_achievers, op_index); + insert_into(hm_table[pm_fluent].first_achievers, op_index); if (use_orders) { - intersect_with(h_m_table_[pm_fluent].necessary, cn_necessary); + intersect_with(hm_table[pm_fluent].necessary, cn_necessary); } } - if (h_m_table_[pm_fluent].landmarks.size() != prev_size) - propagate_pm_fact(pm_fluent, false, next_trigger); + if (hm_table[pm_fluent].landmarks.size() != prev_size) + propagate_pm_atoms(pm_fluent, false, next_trigger); } else { - h_m_table_[pm_fluent].level = level; - h_m_table_[pm_fluent].landmarks = cn_landmarks; + hm_table[pm_fluent].level = level; + hm_table[pm_fluent].landmarks = cn_landmarks; if (use_orders) { - h_m_table_[pm_fluent].necessary = cn_necessary; + hm_table[pm_fluent].necessary = cn_necessary; } - insert_into(h_m_table_[pm_fluent].first_achievers, op_index); - propagate_pm_fact(pm_fluent, true, next_trigger); + insert_into(hm_table[pm_fluent].first_achievers, op_index); + propagate_pm_atoms(pm_fluent, true, next_trigger); } } } void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { if (landmark_node_table.find(set_index) == landmark_node_table.end()) { - const HMEntry &hm_entry = h_m_table_[set_index]; + const HMEntry &hm_entry = hm_table[set_index]; vector facts(hm_entry.fluents); utils::sort_unique(facts); assert(!facts.empty()); @@ -939,29 +939,29 @@ void LandmarkFactoryHM::generate_landmarks( const shared_ptr &task) { TaskProxy task_proxy(*task); initialize(task_proxy); - compute_h_m_landmarks(task_proxy); + compute_hm_landmarks(task_proxy); // now construct landmarks graph vector goal_subsets; FluentSet goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); - 
get_m_sets(variables, m_, goal_subsets, goals); + get_m_sets(variables, m, goal_subsets, goals); list all_landmarks; for (const FluentSet &goal_subset : goal_subsets) { - assert(set_indices_.find(goal_subset) != set_indices_.end()); + assert(set_indices.find(goal_subset) != set_indices.end()); - int set_index = set_indices_[goal_subset]; + int set_index = set_indices[goal_subset]; - if (h_m_table_[set_index].level == -1) { + if (hm_table[set_index].level == -1) { if (log.is_at_least_verbose()) { log << endl << endl << "Subset of goal not reachable !!." << endl << endl << endl; log << "Subset is: "; - print_fluentset(variables, h_m_table_[set_index].fluents); + print_fluent_set(variables, hm_table[set_index].fluents); log << endl; } } // set up goals landmarks for processing - union_with(all_landmarks, h_m_table_[set_index].landmarks); + union_with(all_landmarks, hm_table[set_index].landmarks); // the goal itself is also a landmark insert_into(all_landmarks, set_index); @@ -978,20 +978,20 @@ void LandmarkFactoryHM::generate_landmarks( // if f2 is landmark for f1, subtract landmark set of f2 from that of f1 for (int f1 : all_landmarks) { list everything_to_remove; - for (int f2 : h_m_table_[f1].landmarks) { - union_with(everything_to_remove, h_m_table_[f2].landmarks); + for (int f2 : hm_table[f1].landmarks) { + union_with(everything_to_remove, hm_table[f2].landmarks); } - set_minus(h_m_table_[f1].landmarks, everything_to_remove); + set_minus(hm_table[f1].landmarks, everything_to_remove); // remove necessaries here, otherwise they will be overwritten // since we are writing them as greedy nec. orderings. if (use_orders) - set_minus(h_m_table_[f1].landmarks, h_m_table_[f1].necessary); + set_minus(hm_table[f1].landmarks, hm_table[f1].necessary); } // add the orderings. 
for (int set_index : all_landmarks) { - for (int landmark : h_m_table_[set_index].landmarks) { + for (int landmark : hm_table[set_index].landmarks) { assert(landmark_node_table.find(landmark) != landmark_node_table.end()); assert(landmark_node_table.find(set_index) != landmark_node_table.end()); @@ -999,7 +999,7 @@ void LandmarkFactoryHM::generate_landmarks( *landmark_node_table[landmark], *landmark_node_table[set_index], OrderingType::NATURAL); } - for (int gn : h_m_table_[set_index].necessary) { + for (int gn : hm_table[set_index].necessary) { add_ordering_or_replace_if_stronger( *landmark_node_table[gn], *landmark_node_table[set_index], OrderingType::GREEDY_NECESSARY); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index f3b646c088..318294f5cf 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -9,8 +9,7 @@ namespace landmarks { using FluentSet = std::vector; -std::ostream & -operator<<(std::ostream &os, const FluentSet &fs); +std::ostream &operator<<(std::ostream &os, const FluentSet &fs); struct FluentSetComparer { bool operator()(const FluentSet &fs1, const FluentSet &fs2) const { @@ -18,40 +17,48 @@ struct FluentSetComparer { return fs1.size() < fs2.size(); } for (size_t i = 0; i < fs1.size(); ++i) { - if (fs1[i] != fs2[i]) + if (fs1[i] != fs2[i]) { return fs1[i] < fs2[i]; + } } return false; } }; -// an operator in P_m. Corresponds to an operator from the original problem, -// as well as a set of conditional effects that correspond to noops -struct PMOp { - std::vector pc; - std::vector eff; +/* Corresponds to an operator from the original problem, as well as a + set of conditional effects that correspond to noops. */ +struct PiMOperator { + std::vector precondition; + std::vector effect; + // TODO: Is this still the case? 
// pc separated from effect by a value of -1 - std::vector> cond_noops; + std::vector> conditional_noops; int index; }; -// represents a fluent in the P_m problem +// represents a fluent in the P^m problem struct HMEntry { - // propositions that belong to this set + // Propositions that belong to this set. FluentSet fluents; // -1 -> current cost infinite // 0 -> present in initial state int level; + // TODO: Can we replace the `list` data type? std::list landmarks; - std::list necessary; // greedy necessary landmarks, disjoint from landmarks + // TODO: What does the following comment mean? What is a "greedy necessary landmark"? + // Greedy necessary landmarks, disjoint from landmarks + std::list necessary; std::list first_achievers; - // first int = op index, second int conditional noop effect - // -1 for op itself + /* TODO: What's the meaning of this? Is it actually using a FactPair to + represent something completely unrelated?!? */ + /* First int = op index, second int conditional noop effect + -1 for op itself */ std::vector pc_for; + // TODO: Maybe set the fluents in the constructor as well? 
HMEntry() : level(-1) { } @@ -62,22 +69,24 @@ using FluentSetToIntMap = std::map; class LandmarkFactoryHM : public LandmarkFactory { using TriggerSet = std::unordered_map>; - virtual void generate_landmarks(const std::shared_ptr &task) override; + virtual void generate_landmarks( + const std::shared_ptr &task) override; - void compute_h_m_landmarks(const TaskProxy &task_proxy); + void compute_hm_landmarks(const TaskProxy &task_proxy); void compute_noop_landmarks(int op_index, int noop_index, std::list const &local_landmarks, std::list const &local_necessary, int level, TriggerSet &next_trigger); - void propagate_pm_fact(int factindex, bool newly_discovered, - TriggerSet &trigger); + void propagate_pm_atoms(int atom_index, bool newly_discovered, + TriggerSet &trigger); bool possible_noop_set(const VariablesProxy &variables, const FluentSet &fs1, const FluentSet &fs2); - void build_pm_ops(const TaskProxy &task_proxy); + void build_pm_operators(const TaskProxy &task_proxy); + // TODO: What is interesting? 
bool interesting(const VariablesProxy &variables, const FactPair &fact1, const FactPair &fact2) const; @@ -93,52 +102,59 @@ class LandmarkFactoryHM : public LandmarkFactory { void initialize(const TaskProxy &task_proxy); void free_unneeded_memory(); - void print_fluentset(const VariablesProxy &variables, const FluentSet &fs) const; - void print_pm_op(const VariablesProxy &variables, const PMOp &op) const; + void print_fluent_set( + const VariablesProxy &variables, const FluentSet &fs) const; + void print_pm_operator( + const VariablesProxy &variables, const PiMOperator &op) const; - const int m_; + const int m; const bool conjunctive_landmarks; const bool use_orders; - std::map landmark_node_table; - - std::vector h_m_table_; - std::vector pm_ops_; - // maps each >> unsat_pc_count_; - - void get_m_sets_(const VariablesProxy &variables, int m, int num_included, int current_var, - FluentSet ¤t, - std::vector &subsets); - - void get_m_sets_of_set(const VariablesProxy &variables, - int m, int num_included, - int current_var_index, - FluentSet ¤t, - std::vector &subsets, - const FluentSet &superset); - - void get_split_m_sets(const VariablesProxy &variables, int m, - int ss1_num_included, int ss2_num_included, - int ss1_var_index, int ss2_var_index, - FluentSet ¤t, - std::vector &subsets, - const FluentSet &superset1, const FluentSet &superset2); - - void get_m_sets(const VariablesProxy &variables, int m, std::vector &subsets); - - void get_m_sets(const VariablesProxy &variables, int m, std::vector &subsets, - const FluentSet &superset); - - void get_m_sets(const VariablesProxy &variables, int m, std::vector &subsets, - const State &state); - - void get_split_m_sets(const VariablesProxy &variables, int m, std::vector &subsets, - const FluentSet &superset1, const FluentSet &superset2); - void print_proposition(const VariablesProxy &variables, const FactPair &fluent) const; + std::unordered_map landmark_node_table; + + std::vector hm_table; + std::vector pm_operators; + 
// Maps each set of >> unsatisfied_precondition_count; + + void get_m_sets( + const VariablesProxy &variables, int m, int num_included, + int current_var, FluentSet ¤t, std::vector &subsets); + + void get_m_sets_of_set( + const VariablesProxy &variables, int m, int num_included, + int current_var_index, FluentSet ¤t, + std::vector &subsets, const FluentSet &superset); + + void get_split_m_sets( + const VariablesProxy &variables, int m, int ss1_num_included, + int ss2_num_included, int ss1_var_index, int ss2_var_index, + FluentSet ¤t, std::vector &subsets, + const FluentSet &superset1, const FluentSet &superset2); + + void get_m_sets(const VariablesProxy &variables, int m, + std::vector &subsets); + + void get_m_sets(const VariablesProxy &variables, int m, + std::vector &subsets, const FluentSet &superset); + + void get_m_sets(const VariablesProxy &variables, int m, + std::vector &subsets, const State &state); + + void get_split_m_sets( + const VariablesProxy &variables, int m, std::vector &subsets, + const FluentSet &superset1, const FluentSet &superset2); + + void print_proposition( + const VariablesProxy &variables, const FactPair &fluent) const; public: LandmarkFactoryHM(int m, bool conjunctive_landmarks, From 1c7ebe44faf26591db3a97c8b490c45ab1e04806 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 22:49:21 +0100 Subject: [PATCH 027/108] Remove redundant m-parameter from functions. 
--- src/search/landmarks/landmark_factory_h_m.cc | 56 ++++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 16 +++--- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 8548108123..41798c43ab 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -107,9 +107,9 @@ static bool contains(list &alist, const T &val) { // find partial variable assignments with size m or less // (look at all the variables in the problem) -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, int num_included, int current_var, - FluentSet ¤t, - vector &subsets) { +void LandmarkFactoryHM::get_m_sets( + const VariablesProxy &variables, int num_included, int current_var, + FluentSet ¤t, vector &subsets) { int num_variables = variables.size(); if (num_included == m) { subsets.push_back(current); @@ -134,17 +134,17 @@ void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, int n if (use_var) { current.push_back(current_var_fact); - get_m_sets(variables, m, num_included + 1, current_var + 1, current, subsets); + get_m_sets(variables, num_included + 1, current_var + 1, current, subsets); current.pop_back(); } } // don't include a value of current_var in the set - get_m_sets(variables, m, num_included, current_var + 1, current, subsets); + get_m_sets(variables, num_included, current_var + 1, current, subsets); } // find all size m or less subsets of superset void LandmarkFactoryHM::get_m_sets_of_set(const VariablesProxy &variables, - int m, int num_included, + int num_included, int current_var_index, FluentSet ¤t, vector &subsets, @@ -172,19 +172,19 @@ void LandmarkFactoryHM::get_m_sets_of_set(const VariablesProxy &variables, if (use_var) { // include current fluent in the set current.push_back(superset[current_var_index]); - get_m_sets_of_set(variables, m, num_included + 1, 
current_var_index + 1, current, subsets, superset); + get_m_sets_of_set(variables, num_included + 1, current_var_index + 1, current, subsets, superset); current.pop_back(); } // don't include current fluent in set - get_m_sets_of_set(variables, m, num_included, current_var_index + 1, current, subsets, superset); + get_m_sets_of_set(variables, num_included, current_var_index + 1, current, subsets, superset); } // get subsets of superset1 \cup superset2 with size m or less, // such that they have >= 1 elements from each set. void LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, - int m, int ss1_num_included, int ss2_num_included, + int ss1_num_included, int ss2_num_included, int ss1_var_index, int ss2_var_index, FluentSet ¤t, vector &subsets, const FluentSet &superset1, const FluentSet &superset2) { @@ -223,14 +223,14 @@ void LandmarkFactoryHM::get_split_m_sets( if (use_var) { // include current.push_back(superset1[ss1_var_index]); - get_split_m_sets(variables, m, ss1_num_included + 1, ss2_num_included, + get_split_m_sets(variables, ss1_num_included + 1, ss2_num_included, ss1_var_index + 1, ss2_var_index, current, subsets, superset1, superset2); current.pop_back(); } // don't include - get_split_m_sets(variables, m, ss1_num_included, ss2_num_included, + get_split_m_sets(variables, ss1_num_included, ss2_num_included, ss1_var_index + 1, ss2_var_index, current, subsets, superset1, superset2); } else { @@ -244,14 +244,14 @@ void LandmarkFactoryHM::get_split_m_sets( if (use_var) { // include current.push_back(superset2[ss2_var_index]); - get_split_m_sets(variables, m, ss1_num_included, ss2_num_included + 1, + get_split_m_sets(variables, ss1_num_included, ss2_num_included + 1, ss1_var_index, ss2_var_index + 1, current, subsets, superset1, superset2); current.pop_back(); } // don't include - get_split_m_sets(variables, m, ss1_num_included, ss2_num_included, + get_split_m_sets(variables, ss1_num_included, ss2_num_included, ss1_var_index, ss2_var_index + 
1, current, subsets, superset1, superset2); } @@ -261,17 +261,17 @@ void LandmarkFactoryHM::get_split_m_sets( // e.g. we don't want to represent (truck1-loc x, truck2-loc y) type stuff // get partial assignments of size <= m in the problem -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, vector &subsets) { +void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets) { FluentSet c; - get_m_sets(variables, m, 0, 0, c, subsets); + get_m_sets(variables, 0, 0, c, subsets); } // get subsets of superset with size <= m void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, - int m, vector &subsets, + vector &subsets, const FluentSet &superset) { FluentSet c; - get_m_sets_of_set(variables, m, 0, 0, c, subsets, superset); + get_m_sets_of_set(variables, 0, 0, c, subsets, superset); } // second function to get subsets of size at most m that @@ -279,21 +279,21 @@ void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, // assume disjoint void LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, - int m, vector &subsets, + vector &subsets, const FluentSet &superset1, const FluentSet &superset2) { FluentSet c; - get_split_m_sets(variables, m, 0, 0, 0, 0, c, subsets, superset1, superset2); + get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); } // get subsets of state with size <= m -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, int m, +void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets, const State &state) { FluentSet state_fluents; for (FactProxy fact : state) { state_fluents.push_back(fact.get_pair()); } - get_m_sets(variables, m, subsets, state_fluents); + get_m_sets(variables, subsets, state_fluents); } void LandmarkFactoryHM::print_proposition(const VariablesProxy &variables, const FactPair &fluent) const { @@ -481,7 +481,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // 
preconditions of P_m op are all subsets of original pc pc = get_operator_precondition(op); - get_m_sets(variables, m, pc_subsets, pc); + get_m_sets(variables, pc_subsets, pc); pm_op.precondition.reserve(pc_subsets.size()); // set unsatisfied pc count for op @@ -496,7 +496,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // same for effects eff = get_operator_postcondition(variables.size(), op); - get_m_sets(variables, m, eff_subsets, eff); + get_m_sets(variables, eff_subsets, eff); pm_op.effect.reserve(eff_subsets.size()); for (const FluentSet &eff_subset : eff_subsets) { @@ -526,8 +526,8 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // get the subsets that have >= 1 element in the pc (unless pc is empty) // and >= 1 element in the other set - get_split_m_sets(variables, m, noop_pc_subsets, pc, it->first); - get_split_m_sets(variables, m, noop_eff_subsets, eff, it->first); + get_split_m_sets(variables, noop_pc_subsets, pc, it->first); + get_split_m_sets(variables, noop_eff_subsets, eff, it->first); this_cond_noop.reserve(noop_pc_subsets.size() + noop_eff_subsets.size() + 1); @@ -590,7 +590,7 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { } // Get all the m or less size subsets in the domain. 
vector> msets; - get_m_sets(task_proxy.get_variables(), m, msets); + get_m_sets(task_proxy.get_variables(), msets); // map each set to an integer for (size_t i = 0; i < msets.size(); ++i) { @@ -733,7 +733,7 @@ void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { // get subsets of initial state vector init_subsets; - get_m_sets(task_proxy.get_variables(), m, init_subsets, task_proxy.get_initial_state()); + get_m_sets(task_proxy.get_variables(), init_subsets, task_proxy.get_initial_state()); TriggerSet current_trigger, next_trigger; @@ -944,7 +944,7 @@ void LandmarkFactoryHM::generate_landmarks( vector goal_subsets; FluentSet goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); - get_m_sets(variables, m, goal_subsets, goals); + get_m_sets(variables, goal_subsets, goals); list all_landmarks; for (const FluentSet &goal_subset : goal_subsets) { assert(set_indices.find(goal_subset) != set_indices.end()); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 318294f5cf..707b8b602c 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -126,31 +126,31 @@ class LandmarkFactoryHM : public LandmarkFactory { std::vector>> unsatisfied_precondition_count; void get_m_sets( - const VariablesProxy &variables, int m, int num_included, - int current_var, FluentSet ¤t, std::vector &subsets); + const VariablesProxy &variables, int num_included, int current_var, + FluentSet ¤t, std::vector &subsets); void get_m_sets_of_set( - const VariablesProxy &variables, int m, int num_included, + const VariablesProxy &variables, int num_included, int current_var_index, FluentSet ¤t, std::vector &subsets, const FluentSet &superset); void get_split_m_sets( - const VariablesProxy &variables, int m, int ss1_num_included, + 
const VariablesProxy &variables, int ss1_num_included, int ss2_num_included, int ss1_var_index, int ss2_var_index, FluentSet ¤t, std::vector &subsets, const FluentSet &superset1, const FluentSet &superset2); - void get_m_sets(const VariablesProxy &variables, int m, + void get_m_sets(const VariablesProxy &variables, std::vector &subsets); - void get_m_sets(const VariablesProxy &variables, int m, + void get_m_sets(const VariablesProxy &variables, std::vector &subsets, const FluentSet &superset); - void get_m_sets(const VariablesProxy &variables, int m, + void get_m_sets(const VariablesProxy &variables, std::vector &subsets, const State &state); void get_split_m_sets( - const VariablesProxy &variables, int m, std::vector &subsets, + const VariablesProxy &variables, std::vector &subsets, const FluentSet &superset1, const FluentSet &superset2); void print_proposition( From 8908f084b28a0f98dcaf11eabb43aab1444be58c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 22:56:37 +0100 Subject: [PATCH 028/108] Clean up set operation functions. --- src/search/landmarks/landmark_factory_h_m.cc | 26 ++++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 41798c43ab..093f48fa8f 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -11,20 +11,20 @@ #include "../utils/logging.h" #include "../utils/system.h" -#include #include using namespace std; using utils::ExitCode; namespace landmarks { +// TODO: Can we turn these set operations into static functions? 
// alist = alist \cup other template void union_with(list &alist, const list &other) { - typename list::iterator it1 = alist.begin(); - typename list::const_iterator it2 = other.begin(); + auto it1 = alist.begin(); + auto it2 = other.begin(); - while ((it1 != alist.end()) && (it2 != other.end())) { + while (it1 != alist.end() && it2 != other.end()) { if (*it1 < *it2) { ++it1; } else if (*it1 > *it2) { @@ -41,12 +41,12 @@ void union_with(list &alist, const list &other) { // alist = alist \cap other template void intersect_with(list &alist, const list &other) { - typename list::iterator it1 = alist.begin(), tmp; - typename list::const_iterator it2 = other.begin(); + auto it1 = alist.begin(); + auto it2 = other.begin(); - while ((it1 != alist.end()) && (it2 != other.end())) { + while (it1 != alist.end() && it2 != other.end()) { if (*it1 < *it2) { - tmp = it1; + auto tmp = it1; ++tmp; alist.erase(it1); it1 = tmp; @@ -63,16 +63,16 @@ void intersect_with(list &alist, const list &other) { // alist = alist \setminus other template void set_minus(list &alist, const list &other) { - typename list::iterator it1 = alist.begin(), tmp; - typename list::const_iterator it2 = other.begin(); + auto it1 = alist.begin(); + auto it2 = other.begin(); - while ((it1 != alist.end()) && (it2 != other.end())) { + while (it1 != alist.end() && it2 != other.end()) { if (*it1 < *it2) { ++it1; } else if (*it1 > *it2) { ++it2; } else { - tmp = it1; + auto tmp = it1; ++tmp; alist.erase(it1); it1 = tmp; @@ -84,7 +84,7 @@ void set_minus(list &alist, const list &other) { // alist = alist \cup {val} template void insert_into(list &alist, const T &val) { - typename list::iterator it1 = alist.begin(); + auto it1 = alist.begin(); while (it1 != alist.end()) { if (*it1 > val) { From 3867244e0e96e748966ef12c4af1328f2d378380 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 23:12:12 +0100 Subject: [PATCH 029/108] Rename stuff. 
--- src/search/landmarks/landmark_factory_h_m.cc | 242 +++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 58 ++--- 2 files changed, 149 insertions(+), 151 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 093f48fa8f..b4ad098f9a 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -109,7 +109,7 @@ static bool contains(list &alist, const T &val) { // (look at all the variables in the problem) void LandmarkFactoryHM::get_m_sets( const VariablesProxy &variables, int num_included, int current_var, - FluentSet ¤t, vector &subsets) { + Propositions ¤t, vector &subsets) { int num_variables = variables.size(); if (num_included == m) { subsets.push_back(current); @@ -143,18 +143,14 @@ void LandmarkFactoryHM::get_m_sets( } // find all size m or less subsets of superset -void LandmarkFactoryHM::get_m_sets_of_set(const VariablesProxy &variables, - int num_included, - int current_var_index, - FluentSet ¤t, - vector &subsets, - const FluentSet &superset) { +void LandmarkFactoryHM::get_m_sets_of_set( + const VariablesProxy &variables, int num_included, int current_index, + Propositions ¤t, vector &subsets, const Propositions &superset) { if (num_included == m) { subsets.push_back(current); return; } - - if (current_var_index == static_cast(superset.size())) { + if (current_index == static_cast(superset.size())) { if (num_included != 0) { subsets.push_back(current); } @@ -162,32 +158,32 @@ void LandmarkFactoryHM::get_m_sets_of_set(const VariablesProxy &variables, } bool use_var = true; - for (const FactPair &fluent : current) { - if (!interesting(variables, superset[current_var_index], fluent)) { + for (const FactPair &proposition : current) { + if (!interesting(variables, superset[current_index], proposition)) { use_var = false; break; } } if (use_var) { - // include current fluent in the set - 
current.push_back(superset[current_var_index]); - get_m_sets_of_set(variables, num_included + 1, current_var_index + 1, current, subsets, superset); + // include current proposition in the set + current.push_back(superset[current_index]); + get_m_sets_of_set(variables, num_included + 1, current_index + 1, current, subsets, superset); current.pop_back(); } - // don't include current fluent in set - get_m_sets_of_set(variables, num_included, current_var_index + 1, current, subsets, superset); + // don't include current proposition in set + get_m_sets_of_set(variables, num_included, current_index + 1, current, subsets, superset); } // get subsets of superset1 \cup superset2 with size m or less, // such that they have >= 1 elements from each set. void LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, - int ss1_num_included, int ss2_num_included, - int ss1_var_index, int ss2_var_index, - FluentSet ¤t, vector &subsets, - const FluentSet &superset1, const FluentSet &superset2) { + int num_included1, int num_included2, + int current_index1, int current_index2, + Propositions ¤t, vector &subsets, + const Propositions &superset1, const Propositions &superset2) { /* if( ((ss1_var_index == superset1.size()) && (ss1_num_included == 0)) || ((ss2_var_index == superset2.size()) && (ss2_num_included == 0)) ) { @@ -195,14 +191,14 @@ void LandmarkFactoryHM::get_split_m_sets( } */ - int sup1_size = superset1.size(); - int sup2_size = superset2.size(); + int superset1_size = static_cast(superset1.size()); + int superset2_size = static_cast(superset2.size()); - if (ss1_num_included + ss2_num_included == m || - (ss1_var_index == sup1_size && ss2_var_index == sup2_size)) { + if (num_included1 + num_included2 == m || + (current_index1 == superset1_size && current_index2 == superset2_size)) { // if set is empty, don't have to include from it - if ((ss1_num_included > 0 || sup1_size == 0) && - (ss2_num_included > 0 || sup2_size == 0)) { + if ((num_included1 > 0 || 
superset1_size == 0) && + (num_included2 > 0 || superset2_size == 0)) { subsets.push_back(current); } return; @@ -210,11 +206,11 @@ void LandmarkFactoryHM::get_split_m_sets( bool use_var = true; - if (ss1_var_index != sup1_size && - (ss2_var_index == sup2_size || - superset1[ss1_var_index] < superset2[ss2_var_index])) { - for (const FactPair &fluent : current) { - if (!interesting(variables, superset1[ss1_var_index], fluent)) { + if (current_index1 != superset1_size && + (current_index2 == superset2_size || + superset1[current_index1] < superset2[current_index2])) { + for (const FactPair &proposition : current) { + if (!interesting(variables, superset1[current_index1], proposition)) { use_var = false; break; } @@ -222,20 +218,20 @@ void LandmarkFactoryHM::get_split_m_sets( if (use_var) { // include - current.push_back(superset1[ss1_var_index]); - get_split_m_sets(variables, ss1_num_included + 1, ss2_num_included, - ss1_var_index + 1, ss2_var_index, + current.push_back(superset1[current_index1]); + get_split_m_sets(variables, num_included1 + 1, num_included2, + current_index1 + 1, current_index2, current, subsets, superset1, superset2); current.pop_back(); } // don't include - get_split_m_sets(variables, ss1_num_included, ss2_num_included, - ss1_var_index + 1, ss2_var_index, + get_split_m_sets(variables, num_included1, num_included2, + current_index1 + 1, current_index2, current, subsets, superset1, superset2); } else { - for (const FactPair &fluent : current) { - if (!interesting(variables, superset2[ss2_var_index], fluent)) { + for (const FactPair &proposition : current) { + if (!interesting(variables, superset2[current_index2], proposition)) { use_var = false; break; } @@ -243,16 +239,16 @@ void LandmarkFactoryHM::get_split_m_sets( if (use_var) { // include - current.push_back(superset2[ss2_var_index]); - get_split_m_sets(variables, ss1_num_included, ss2_num_included + 1, - ss1_var_index, ss2_var_index + 1, + current.push_back(superset2[current_index2]); + 
get_split_m_sets(variables, num_included1, num_included2 + 1, + current_index1, current_index2 + 1, current, subsets, superset1, superset2); current.pop_back(); } // don't include - get_split_m_sets(variables, ss1_num_included, ss2_num_included, - ss1_var_index, ss2_var_index + 1, + get_split_m_sets(variables, num_included1, num_included2, + current_index1, current_index2 + 1, current, subsets, superset1, superset2); } } @@ -261,16 +257,16 @@ void LandmarkFactoryHM::get_split_m_sets( // e.g. we don't want to represent (truck1-loc x, truck2-loc y) type stuff // get partial assignments of size <= m in the problem -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets) { - FluentSet c; +void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets) { + Propositions c; get_m_sets(variables, 0, 0, c, subsets); } // get subsets of superset with size <= m void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, - vector &subsets, - const FluentSet &superset) { - FluentSet c; + vector &subsets, + const Propositions &superset) { + Propositions c; get_m_sets_of_set(variables, 0, 0, c, subsets, superset); } @@ -279,43 +275,43 @@ void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, // assume disjoint void LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, - vector &subsets, - const FluentSet &superset1, const FluentSet &superset2) { - FluentSet c; + vector &subsets, + const Propositions &superset1, const Propositions &superset2) { + Propositions c; get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); } // get subsets of state with size <= m void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, - vector &subsets, + vector &subsets, const State &state) { - FluentSet state_fluents; + Propositions state_proposition; for (FactProxy fact : state) { - state_fluents.push_back(fact.get_pair()); + state_proposition.push_back(fact.get_pair()); } - 
get_m_sets(variables, subsets, state_fluents); + get_m_sets(variables, subsets, state_proposition); } -void LandmarkFactoryHM::print_proposition(const VariablesProxy &variables, const FactPair &fluent) const { +void LandmarkFactoryHM::print_proposition(const VariablesProxy &variables, const FactPair &proposition) const { if (log.is_at_least_verbose()) { - VariableProxy var = variables[fluent.var]; - FactProxy fact = var.get_fact(fluent.value); + VariableProxy var = variables[proposition.var]; + FactProxy fact = var.get_fact(proposition.value); log << fact.get_name() << " (" << var.get_name() << "(" << fact.get_variable().get_id() << ")" << "->" << fact.get_value() << ")"; } } -static FluentSet get_operator_precondition(const OperatorProxy &op) { - FluentSet preconditions = task_properties::get_fact_pairs(op.get_preconditions()); +static Propositions get_operator_precondition(const OperatorProxy &op) { + Propositions preconditions = task_properties::get_fact_pairs(op.get_preconditions()); sort(preconditions.begin(), preconditions.end()); return preconditions; } // get facts that are always true after the operator application // (effects plus prevail conditions) -static FluentSet get_operator_postcondition(int num_vars, const OperatorProxy &op) { - FluentSet postconditions; +static Propositions get_operator_postcondition(int num_vars, const OperatorProxy &op) { + Propositions postconditions; EffectsProxy effects = op.get_effects(); vector has_effect_on_var(num_vars, false); @@ -341,27 +337,27 @@ void LandmarkFactoryHM::print_pm_operator(const VariablesProxy &variables, const vector, set>> conds; for (int pc : op.precondition) { - for (const FactPair &fluent : hm_table[pc].fluents) { - pcs.insert(fluent); + for (const FactPair &proposition : hm_table[pc].propositions) { + pcs.insert(proposition); } } for (int eff : op.effect) { - for (const FactPair &fluent : hm_table[eff].fluents) { - effs.insert(fluent); + for (const FactPair &proposition : 
hm_table[eff].propositions) { + effs.insert(proposition); } } for (size_t i = 0; i < op.conditional_noops.size(); ++i) { cond_pc.clear(); cond_eff.clear(); - int pm_fluent; + int pm_proposition; size_t j; log << "PC:" << endl; - for (j = 0; (pm_fluent = op.conditional_noops[i][j]) != -1; ++j) { - print_fluent_set(variables, hm_table[pm_fluent].fluents); + for (j = 0; (pm_proposition = op.conditional_noops[i][j]) != -1; ++j) { + print_proposition_set(variables, hm_table[pm_proposition].propositions); log << endl; - for (size_t k = 0; k < hm_table[pm_fluent].fluents.size(); ++k) { - cond_pc.insert(hm_table[pm_fluent].fluents[k]); + for (size_t k = 0; k < hm_table[pm_proposition].propositions.size(); ++k) { + cond_pc.insert(hm_table[pm_proposition].propositions[k]); } } // advance to effects section @@ -370,13 +366,13 @@ void LandmarkFactoryHM::print_pm_operator(const VariablesProxy &variables, const log << "EFF:" << endl; for (; j < op.conditional_noops[i].size(); ++j) { - int pm_fluent = op.conditional_noops[i][j]; + int pm_proposition = op.conditional_noops[i][j]; - print_fluent_set(variables, hm_table[pm_fluent].fluents); + print_proposition_set(variables, hm_table[pm_proposition].propositions); log << endl; - for (size_t k = 0; k < hm_table[pm_fluent].fluents.size(); ++k) { - cond_eff.insert(hm_table[pm_fluent].fluents[k]); + for (size_t k = 0; k < hm_table[pm_proposition].propositions.size(); ++k) { + cond_eff.insert(hm_table[pm_proposition].propositions[k]); } } conds.emplace_back(cond_pc, cond_eff); @@ -413,7 +409,7 @@ void LandmarkFactoryHM::print_pm_operator(const VariablesProxy &variables, const } } -void LandmarkFactoryHM::print_fluent_set(const VariablesProxy &variables, const FluentSet &fs) const { +void LandmarkFactoryHM::print_proposition_set(const VariablesProxy &variables, const Propositions &fs) const { if (log.is_at_least_verbose()) { log << "( "; for (const FactPair &fact : fs) { @@ -427,11 +423,11 @@ void LandmarkFactoryHM::print_fluent_set(const 
VariablesProxy &variables, const // check whether fs2 is a possible noop set for action with fs1 as effect // sets cannot be 1) defined on same variable, 2) otherwise mutex bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, - const FluentSet &fs1, - const FluentSet &fs2) { - FluentSet::const_iterator fs1it = fs1.begin(), fs2it = fs2.begin(); + const Propositions &propositions1, + const Propositions &propositions2) { + Propositions::const_iterator fs1it = propositions1.begin(), fs2it = propositions2.begin(); - while (fs1it != fs1.end() && fs2it != fs2.end()) { + while (fs1it != propositions1.end() && fs2it != propositions2.end()) { if (fs1it->var == fs2it->var) { return false; } else if (fs1it->var < fs2it->var) { @@ -441,11 +437,11 @@ bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, } } - for (const FactPair &fluent1 : fs1) { - FactProxy fact1 = variables[fluent1.var].get_fact(fluent1.value); - for (const FactPair &fluent2 : fs2) { + for (const FactPair &proposition1 : propositions1) { + FactProxy fact1 = variables[proposition1.var].get_fact(proposition1.value); + for (const FactPair &proposition2 : propositions2) { if (fact1.is_mutex( - variables[fluent2.var].get_fact(fluent2.value))) + variables[proposition2.var].get_fact(proposition2.value))) return false; } } @@ -456,8 +452,8 @@ bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, // make the operators of the P_m problem void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { - FluentSet pc, eff; - vector pc_subsets, eff_subsets, noop_pc_subsets, noop_eff_subsets; + Propositions pc, eff; + vector pc_subsets, eff_subsets, noop_pc_subsets, noop_eff_subsets; static int op_count = 0; int set_index, noop_index; @@ -487,7 +483,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // set unsatisfied pc count for op unsatisfied_precondition_count[op.get_id()].first = pc_subsets.size(); - for (const 
FluentSet &pc_subset : pc_subsets) { + for (const Propositions &pc_subset : pc_subsets) { assert(set_indices.find(pc_subset) != set_indices.end()); set_index = set_indices[pc_subset]; pm_op.precondition.push_back(set_index); @@ -499,7 +495,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { get_m_sets(variables, eff_subsets, eff); pm_op.effect.reserve(eff_subsets.size()); - for (const FluentSet &eff_subset : eff_subsets) { + for (const Propositions &eff_subset : eff_subsets) { assert(set_indices.find(eff_subset) != set_indices.end()); set_index = set_indices[eff_subset]; pm_op.effect.push_back(set_index); @@ -511,7 +507,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // they conflict with the effect of the operator (no need to check pc // because mvvs appearing in pc also appear in effect - FluentSetToIntMap::const_iterator it = set_indices.begin(); + PropositionSetToIntMap::const_iterator it = set_indices.begin(); while (static_cast(it->first.size()) < m && it != set_indices.end()) { if (possible_noop_set(variables, eff, it->first)) { @@ -596,10 +592,10 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { for (size_t i = 0; i < msets.size(); ++i) { hm_table.emplace_back(); set_indices[msets[i]] = i; - hm_table[i].fluents = msets[i]; + hm_table[i].propositions = msets[i]; } if (log.is_at_least_normal()) { - log << "Using " << hm_table.size() << " P^m fluents." << endl; + log << "Using " << hm_table.size() << " P^m propositions." 
<< endl; } build_pm_operators(task_proxy); @@ -643,8 +639,8 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { } for (int op_id : candidates) { - FluentSet post = get_operator_postcondition(variables.size(), operators[op_id]); - FluentSet pre = get_operator_precondition(operators[op_id]); + Propositions post = get_operator_postcondition(variables.size(), operators[op_id]); + Propositions pre = get_operator_precondition(operators[op_id]); size_t j; for (j = 0; j < landmark.atoms.size(); ++j) { const FactPair &atom = landmark.atoms[j]; @@ -652,8 +648,8 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { if (find(post.begin(), post.end(), atom) != post.end()) continue; bool is_mutex = false; - for (const FactPair &fluent : post) { - if (variables[fluent.var].get_fact(fluent.value).is_mutex( + for (const FactPair &proposition : post) { + if (variables[proposition.var].get_fact(proposition.value).is_mutex( variables[atom.var].get_fact(atom.value))) { is_mutex = true; break; @@ -662,10 +658,10 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { if (is_mutex) { break; } - for (const FactPair &fluent : pre) { + for (const FactPair &proposition : pre) { // we know that lm_val is not added by the operator // so if it incompatible with the pc, this can't be an achiever - if (variables[fluent.var].get_fact(fluent.value).is_mutex( + if (variables[proposition.var].get_fact(proposition.value).is_mutex( variables[atom.var].get_fact(atom.value))) { is_mutex = true; break; @@ -676,7 +672,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { } } if (j == landmark.atoms.size()) { - // not inconsistent with any of the other landmark fluents + // not inconsistent with any of the other landmark propositions landmark.possible_achievers.insert(op_id); } } @@ -732,7 +728,7 @@ void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered void LandmarkFactoryHM::compute_hm_landmarks(const 
TaskProxy &task_proxy) { // get subsets of initial state - vector init_subsets; + vector init_subsets; get_m_sets(task_proxy.get_variables(), init_subsets, task_proxy.get_initial_state()); TriggerSet current_trigger, next_trigger; @@ -863,7 +859,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( TriggerSet &next_trigger) { list cn_necessary, cn_landmarks; size_t prev_size; - int pm_fluent; + int pm_proposition; PiMOperator &action = pm_operators[op_index]; vector &pc_eff_pair = action.conditional_noops[noop_index]; @@ -878,12 +874,12 @@ void LandmarkFactoryHM::compute_noop_landmarks( } size_t i; - for (i = 0; (pm_fluent = pc_eff_pair[i]) != -1; ++i) { - union_with(cn_landmarks, hm_table[pm_fluent].landmarks); - insert_into(cn_landmarks, pm_fluent); + for (i = 0; (pm_proposition = pc_eff_pair[i]) != -1; ++i) { + union_with(cn_landmarks, hm_table[pm_proposition].landmarks); + insert_into(cn_landmarks, pm_proposition); if (use_orders) { - insert_into(cn_necessary, pm_fluent); + insert_into(cn_necessary, pm_proposition); } } @@ -891,32 +887,32 @@ void LandmarkFactoryHM::compute_noop_landmarks( ++i; for (; i < pc_eff_pair.size(); ++i) { - pm_fluent = pc_eff_pair[i]; - if (hm_table[pm_fluent].level != -1) { - prev_size = hm_table[pm_fluent].landmarks.size(); - intersect_with(hm_table[pm_fluent].landmarks, cn_landmarks); + pm_proposition = pc_eff_pair[i]; + if (hm_table[pm_proposition].level != -1) { + prev_size = hm_table[pm_proposition].landmarks.size(); + intersect_with(hm_table[pm_proposition].landmarks, cn_landmarks); // if the add effect appears in cn_landmarks, // fact is being achieved for >1st time // no need to intersect for gn orderings // or add op to first achievers - if (!contains(cn_landmarks, pm_fluent)) { - insert_into(hm_table[pm_fluent].first_achievers, op_index); + if (!contains(cn_landmarks, pm_proposition)) { + insert_into(hm_table[pm_proposition].first_achievers, op_index); if (use_orders) { - intersect_with(hm_table[pm_fluent].necessary, 
cn_necessary); + intersect_with(hm_table[pm_proposition].necessary, cn_necessary); } } - if (hm_table[pm_fluent].landmarks.size() != prev_size) - propagate_pm_atoms(pm_fluent, false, next_trigger); + if (hm_table[pm_proposition].landmarks.size() != prev_size) + propagate_pm_atoms(pm_proposition, false, next_trigger); } else { - hm_table[pm_fluent].level = level; - hm_table[pm_fluent].landmarks = cn_landmarks; + hm_table[pm_proposition].level = level; + hm_table[pm_proposition].landmarks = cn_landmarks; if (use_orders) { - hm_table[pm_fluent].necessary = cn_necessary; + hm_table[pm_proposition].necessary = cn_necessary; } - insert_into(hm_table[pm_fluent].first_achievers, op_index); - propagate_pm_atoms(pm_fluent, true, next_trigger); + insert_into(hm_table[pm_proposition].first_achievers, op_index); + propagate_pm_atoms(pm_proposition, true, next_trigger); } } } @@ -924,7 +920,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { if (landmark_node_table.find(set_index) == landmark_node_table.end()) { const HMEntry &hm_entry = hm_table[set_index]; - vector facts(hm_entry.fluents); + vector facts(hm_entry.propositions); utils::sort_unique(facts); assert(!facts.empty()); Landmark landmark(facts, false, (facts.size() > 1), goal); @@ -941,12 +937,12 @@ void LandmarkFactoryHM::generate_landmarks( initialize(task_proxy); compute_hm_landmarks(task_proxy); // now construct landmarks graph - vector goal_subsets; - FluentSet goals = task_properties::get_fact_pairs(task_proxy.get_goals()); + vector goal_subsets; + Propositions goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); get_m_sets(variables, goal_subsets, goals); list all_landmarks; - for (const FluentSet &goal_subset : goal_subsets) { + for (const Propositions &goal_subset : goal_subsets) { assert(set_indices.find(goal_subset) != set_indices.end()); int set_index = 
set_indices[goal_subset]; @@ -955,7 +951,7 @@ void LandmarkFactoryHM::generate_landmarks( if (log.is_at_least_verbose()) { log << endl << endl << "Subset of goal not reachable !!." << endl << endl << endl; log << "Subset is: "; - print_fluent_set(variables, hm_table[set_index].fluents); + print_proposition_set(variables, hm_table[set_index].propositions); log << endl; } } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 707b8b602c..8a20a29551 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -7,12 +7,12 @@ #include namespace landmarks { -using FluentSet = std::vector; +using Propositions = std::vector; -std::ostream &operator<<(std::ostream &os, const FluentSet &fs); +std::ostream &operator<<(std::ostream &os, const Propositions &fs); -struct FluentSetComparer { - bool operator()(const FluentSet &fs1, const FluentSet &fs2) const { +struct PropositionSetComparer { + bool operator()(const Propositions &fs1, const Propositions &fs2) const { if (fs1.size() != fs2.size()) { return fs1.size() < fs2.size(); } @@ -36,10 +36,10 @@ struct PiMOperator { int index; }; -// represents a fluent in the P^m problem +// represents a proposition in the P^m problem struct HMEntry { // Propositions that belong to this set. - FluentSet fluents; + Propositions propositions; // -1 -> current cost infinite // 0 -> present in initial state int level; @@ -58,13 +58,14 @@ struct HMEntry { -1 for op itself */ std::vector pc_for; - // TODO: Maybe set the fluents in the constructor as well? + // TODO: Maybe set the propositions in the constructor as well? 
HMEntry() : level(-1) { } }; -using FluentSetToIntMap = std::map; +using PropositionSetToIntMap = + std::map; class LandmarkFactoryHM : public LandmarkFactory { using TriggerSet = std::unordered_map>; @@ -83,8 +84,8 @@ class LandmarkFactoryHM : public LandmarkFactory { TriggerSet &trigger); bool possible_noop_set(const VariablesProxy &variables, - const FluentSet &fs1, - const FluentSet &fs2); + const Propositions &propositions1, + const Propositions &propositions2); void build_pm_operators(const TaskProxy &task_proxy); // TODO: What is interesting? bool interesting(const VariablesProxy &variables, @@ -102,8 +103,8 @@ class LandmarkFactoryHM : public LandmarkFactory { void initialize(const TaskProxy &task_proxy); void free_unneeded_memory(); - void print_fluent_set( - const VariablesProxy &variables, const FluentSet &fs) const; + void print_proposition_set( + const VariablesProxy &variables, const Propositions &fs) const; void print_pm_operator( const VariablesProxy &variables, const PiMOperator &op) const; @@ -115,8 +116,8 @@ class LandmarkFactoryHM : public LandmarkFactory { std::vector hm_table; std::vector pm_operators; - // Maps each set of &subsets); + Propositions ¤t, std::vector &subsets); void get_m_sets_of_set( const VariablesProxy &variables, int num_included, - int current_var_index, FluentSet ¤t, - std::vector &subsets, const FluentSet &superset); + int current_index, Propositions ¤t, + std::vector &subsets, const Propositions &superset); void get_split_m_sets( - const VariablesProxy &variables, int ss1_num_included, - int ss2_num_included, int ss1_var_index, int ss2_var_index, - FluentSet ¤t, std::vector &subsets, - const FluentSet &superset1, const FluentSet &superset2); + const VariablesProxy &variables, int num_included1, + int num_included2, int current_index1, int current_index2, + Propositions ¤t, std::vector &subsets, + const Propositions &superset1, const Propositions &superset2); void get_m_sets(const VariablesProxy &variables, - std::vector 
&subsets); + std::vector &subsets); - void get_m_sets(const VariablesProxy &variables, - std::vector &subsets, const FluentSet &superset); + void get_m_sets( + const VariablesProxy &variables, std::vector &subsets, + const Propositions &superset); void get_m_sets(const VariablesProxy &variables, - std::vector &subsets, const State &state); + std::vector &subsets, const State &state); void get_split_m_sets( - const VariablesProxy &variables, std::vector &subsets, - const FluentSet &superset1, const FluentSet &superset2); + const VariablesProxy &variables, std::vector &subsets, + const Propositions &superset1, const Propositions &superset2); void print_proposition( - const VariablesProxy &variables, const FactPair &fluent) const; + const VariablesProxy &variables, const FactPair &proposition) const; public: LandmarkFactoryHM(int m, bool conjunctive_landmarks, From 514fe454cbceddd80e4fff21284c085a188fb252 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 23:30:44 +0100 Subject: [PATCH 030/108] Start breaking apart 'get_m_set' functions. 
--- src/search/landmarks/landmark_factory_h_m.cc | 117 ++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 7 ++ 2 files changed, 70 insertions(+), 54 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index b4ad098f9a..d2d898e056 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -104,45 +104,68 @@ static bool contains(list &alist, const T &val) { return find(alist.begin(), alist.end(), val) != alist.end(); } +static bool are_mutex(const VariablesProxy &variables, + const FactPair &atom1, const FactPair &atom2) { + return variables[atom1.var].get_fact(atom1.value).is_mutex( + variables[atom2.var].get_fact(atom2.value)); +} + +void LandmarkFactoryHM::get_m_sets_including_current_var( + const VariablesProxy &variables, int num_included, int current_var, + Propositions ¤t, std::vector &subsets) { + int domain_size = variables[current_var].get_domain_size(); + for (int value = 0; value < domain_size; ++value) { + FactPair atom(current_var, value); + bool use_var = ranges::none_of( + current.begin(), current.end(), [&](const FactPair &other) { + return are_mutex(variables, atom, other); + }); + if (use_var) { + current.push_back(atom); + get_m_sets(variables, num_included + 1, current_var + 1, current, subsets); + current.pop_back(); + } + } +} -// find partial variable assignments with size m or less -// (look at all the variables in the problem) +// Find partial variable assignments of size m or less. 
void LandmarkFactoryHM::get_m_sets( const VariablesProxy &variables, int num_included, int current_var, Propositions ¤t, vector &subsets) { - int num_variables = variables.size(); if (num_included == m) { subsets.push_back(current); return; } - if (current_var == num_variables) { + if (current_var == static_cast(variables.size())) { if (num_included != 0) { subsets.push_back(current); } return; } - // include a value of current_var in the set - for (int i = 0; i < variables[current_var].get_domain_size(); ++i) { - bool use_var = true; - FactPair current_var_fact(current_var, i); - for (const FactPair ¤t_fact : current) { - if (!interesting(variables, current_var_fact, current_fact)) { - use_var = false; - break; - } - } + get_m_sets_including_current_var( + variables, num_included, current_var, current, subsets); + // Do not include a value of `current_var` in the set. + get_m_sets(variables, num_included, current_var + 1, current, subsets); +} - if (use_var) { - current.push_back(current_var_fact); - get_m_sets(variables, num_included + 1, current_var + 1, current, subsets); - current.pop_back(); - } +void LandmarkFactoryHM::get_m_sets_of_set_including_current_proposition( + const VariablesProxy &variables, int num_included, + int current_var_index, Propositions ¤t, + std::vector &subsets, const Propositions &superset) { + const FactPair &atom = superset[current_var_index]; + bool use_fluent = ranges::none_of( + current.begin(), current.end(), [&](const FactPair &other) { + return are_mutex(variables, atom, other); + }); + if (use_fluent) { + current.push_back(atom); + get_m_sets_of_set(variables, num_included + 1, current_var_index + 1, + current, subsets, superset); + current.pop_back(); } - // don't include a value of current_var in the set - get_m_sets(variables, num_included, current_var + 1, current, subsets); } -// find all size m or less subsets of superset +// Find all subsets of `superset` with size m or less. 
void LandmarkFactoryHM::get_m_sets_of_set( const VariablesProxy &variables, int num_included, int current_index, Propositions ¤t, vector &subsets, const Propositions &superset) { @@ -156,49 +179,27 @@ void LandmarkFactoryHM::get_m_sets_of_set( } return; } - - bool use_var = true; - for (const FactPair &proposition : current) { - if (!interesting(variables, superset[current_index], proposition)) { - use_var = false; - break; - } - } - - if (use_var) { - // include current proposition in the set - current.push_back(superset[current_index]); - get_m_sets_of_set(variables, num_included + 1, current_index + 1, current, subsets, superset); - current.pop_back(); - } - - // don't include current proposition in set + get_m_sets_of_set_including_current_proposition( + variables, num_included, current_index, current, subsets, superset); + // Do not include `current` fluent in set. get_m_sets_of_set(variables, num_included, current_index + 1, current, subsets, superset); } -// get subsets of superset1 \cup superset2 with size m or less, -// such that they have >= 1 elements from each set. +/* Get subsets of `superset1` \cup `superset2` with size m or less, such that + all subsets have >= 1 elements from each superset. 
*/ void LandmarkFactoryHM::get_split_m_sets( - const VariablesProxy &variables, - int num_included1, int num_included2, + const VariablesProxy &variables, int num_included1, int num_included2, int current_index1, int current_index2, Propositions ¤t, vector &subsets, const Propositions &superset1, const Propositions &superset2) { - /* - if( ((ss1_var_index == superset1.size()) && (ss1_num_included == 0)) || - ((ss2_var_index == superset2.size()) && (ss2_num_included == 0)) ) { - return; - } - */ - int superset1_size = static_cast(superset1.size()); int superset2_size = static_cast(superset2.size()); + assert(superset1_size > 0); + assert(superset2_size > 0); if (num_included1 + num_included2 == m || (current_index1 == superset1_size && current_index2 == superset2_size)) { - // if set is empty, don't have to include from it - if ((num_included1 > 0 || superset1_size == 0) && - (num_included2 > 0 || superset2_size == 0)) { + if (num_included1 > 0 && num_included2 > 0) { subsets.push_back(current); } return; @@ -278,7 +279,14 @@ void LandmarkFactoryHM::get_split_m_sets( vector &subsets, const Propositions &superset1, const Propositions &superset2) { Propositions c; - get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); + // If a set is empty, we do not have to include from it. TODO: Why not? + if (superset1.empty()) { + get_m_sets_of_set(variables, 0, 0, c, subsets, superset2); + } else if (superset2.empty()) { + get_m_sets_of_set(variables, 0, 0, c, subsets, superset1); + } else { + get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); + } } // get subsets of state with size <= m @@ -560,6 +568,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { } } +// TODO: Replace usages of `interesting` with `are_mutex` above. 
bool LandmarkFactoryHM::interesting(const VariablesProxy &variables, const FactPair &fact1, const FactPair &fact2) const { // mutexes can always be safely pruned diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 8a20a29551..35a72fa80d 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -126,10 +126,17 @@ class LandmarkFactoryHM : public LandmarkFactory { */ std::vector>> unsatisfied_precondition_count; + void get_m_sets_including_current_var( + const VariablesProxy &variables, int num_included, int current_var, + Propositions ¤t, std::vector &subsets); void get_m_sets( const VariablesProxy &variables, int num_included, int current_var, Propositions ¤t, std::vector &subsets); + void get_m_sets_of_set_including_current_proposition( + const VariablesProxy &variables, int num_included, + int current_index, Propositions ¤t, + std::vector &subsets, const Propositions &superset); void get_m_sets_of_set( const VariablesProxy &variables, int num_included, int current_index, Propositions ¤t, From bc83f2027bff74f5b3c6b9405762bf45b48f9df9 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 11 Mar 2025 23:58:36 +0100 Subject: [PATCH 031/108] Finish breaking apart 'get_m_sets' functions. 
--- src/search/landmarks/landmark_factory_h_m.cc | 105 +++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 5 + 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index d2d898e056..a6e5e634d3 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -150,16 +150,16 @@ void LandmarkFactoryHM::get_m_sets( void LandmarkFactoryHM::get_m_sets_of_set_including_current_proposition( const VariablesProxy &variables, int num_included, - int current_var_index, Propositions ¤t, + int current_index, Propositions ¤t, std::vector &subsets, const Propositions &superset) { - const FactPair &atom = superset[current_var_index]; - bool use_fluent = ranges::none_of( + const FactPair &atom = superset[current_index]; + bool use_proposition = ranges::none_of( current.begin(), current.end(), [&](const FactPair &other) { return are_mutex(variables, atom, other); }); - if (use_fluent) { + if (use_proposition) { current.push_back(atom); - get_m_sets_of_set(variables, num_included + 1, current_var_index + 1, + get_m_sets_of_set(variables, num_included + 1, current_index + 1, current, subsets, superset); current.pop_back(); } @@ -181,10 +181,29 @@ void LandmarkFactoryHM::get_m_sets_of_set( } get_m_sets_of_set_including_current_proposition( variables, num_included, current_index, current, subsets, superset); - // Do not include `current` fluent in set. + // Do not include proposition at `current_index` in set. 
get_m_sets_of_set(variables, num_included, current_index + 1, current, subsets, superset); } +void LandmarkFactoryHM::get_split_m_sets_including_current_proposition_from_first( + const VariablesProxy &variables, int num_included1, int num_included2, + int current_index1, int current_index2, + Propositions ¤t, std::vector &subsets, + const Propositions &superset1, const Propositions &superset2) { + const FactPair &atom = superset1[current_index1]; + bool use_proposition = ranges::none_of( + current.begin(), current.end(), [&](const FactPair &other) { + return are_mutex(variables, atom, other); + }); + if (use_proposition) { + current.push_back(atom); + get_split_m_sets(variables, num_included1 + 1, num_included2, + current_index1 + 1, current_index2, + current, subsets, superset1, superset2); + current.pop_back(); + } +} + /* Get subsets of `superset1` \cup `superset2` with size m or less, such that all subsets have >= 1 elements from each superset. */ void LandmarkFactoryHM::get_split_m_sets( @@ -198,72 +217,48 @@ void LandmarkFactoryHM::get_split_m_sets( assert(superset2_size > 0); if (num_included1 + num_included2 == m || - (current_index1 == superset1_size && current_index2 == superset2_size)) { + (current_index1 == superset1_size && + current_index2 == superset2_size)) { if (num_included1 > 0 && num_included2 > 0) { subsets.push_back(current); } return; } - bool use_var = true; - if (current_index1 != superset1_size && (current_index2 == superset2_size || superset1[current_index1] < superset2[current_index2])) { - for (const FactPair &proposition : current) { - if (!interesting(variables, superset1[current_index1], proposition)) { - use_var = false; - break; - } - } - - if (use_var) { - // include - current.push_back(superset1[current_index1]); - get_split_m_sets(variables, num_included1 + 1, num_included2, - current_index1 + 1, current_index2, - current, subsets, superset1, superset2); - current.pop_back(); - } - - // don't include - 
get_split_m_sets(variables, num_included1, num_included2, - current_index1 + 1, current_index2, - current, subsets, superset1, superset2); + get_split_m_sets_including_current_proposition_from_first( + variables, num_included1, num_included2, current_index1, + current_index2, current, subsets, superset1, superset2); + // Do not include proposition at `current_index1` in set. + get_split_m_sets( + variables, num_included1, num_included2, current_index1 + 1, + current_index2, current, subsets, superset1, superset2); } else { - for (const FactPair &proposition : current) { - if (!interesting(variables, superset2[current_index2], proposition)) { - use_var = false; - break; - } - } - - if (use_var) { - // include - current.push_back(superset2[current_index2]); - get_split_m_sets(variables, num_included1, num_included2 + 1, - current_index1, current_index2 + 1, - current, subsets, superset1, superset2); - current.pop_back(); - } - - // don't include - get_split_m_sets(variables, num_included1, num_included2, - current_index1, current_index2 + 1, - current, subsets, superset1, superset2); + /* + Switching order of 1 and 2 here to avoid code duplication in the form + of a function `get_split_m_sets_including_current_proposition_from_second` + analogus to `get_split_m_sets_including_current_proposition_from_first`. + */ + get_split_m_sets_including_current_proposition_from_first( + variables, num_included2, num_included1, current_index2, + current_index1, current, subsets, superset2, superset1); + // Do not include proposition at `current_index2` in set. + get_split_m_sets( + variables, num_included1, num_included2, current_index1, + current_index2 + 1, current, subsets, superset1, superset2); } } -// use together is method that determines whether the two variables are interesting together, -// e.g. 
we don't want to represent (truck1-loc x, truck2-loc y) type stuff - -// get partial assignments of size <= m in the problem -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets) { +// Get partial assignments of size <= m in the problem. +void LandmarkFactoryHM::get_m_sets( + const VariablesProxy &variables, vector &subsets) { Propositions c; get_m_sets(variables, 0, 0, c, subsets); } -// get subsets of superset with size <= m +// Get subsets of `superset` with size <= m. void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, vector &subsets, const Propositions &superset) { diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 35a72fa80d..16f701d6f4 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -142,6 +142,11 @@ class LandmarkFactoryHM : public LandmarkFactory { int current_index, Propositions ¤t, std::vector &subsets, const Propositions &superset); + void get_split_m_sets_including_current_proposition_from_first( + const VariablesProxy &variables, int num_included1, + int num_included2, int current_index1, int current_index2, + Propositions ¤t, std::vector &subsets, + const Propositions &superset1, const Propositions &superset2); void get_split_m_sets( const VariablesProxy &variables, int num_included1, int num_included2, int current_index1, int current_index2, From 4e5a9cd4260f48c129d832406ac3cc37c49d9929 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 00:19:24 +0100 Subject: [PATCH 032/108] Add return type to outermost 'get_m_sets' functions. 
--- src/search/landmarks/landmark_factory_h_m.cc | 77 ++++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 16 ++-- 2 files changed, 46 insertions(+), 47 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index a6e5e634d3..206d633e23 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -204,8 +204,8 @@ void LandmarkFactoryHM::get_split_m_sets_including_current_proposition_from_firs } } -/* Get subsets of `superset1` \cup `superset2` with size m or less, such that - all subsets have >= 1 elements from each superset. */ +/* Get subsets of `superset1` \cup `superset2` with size m or less, such + that all subsets have >= 1 elements from each superset. */ void LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, int num_included1, int num_included2, int current_index1, int current_index2, @@ -239,7 +239,7 @@ void LandmarkFactoryHM::get_split_m_sets( /* Switching order of 1 and 2 here to avoid code duplication in the form of a function `get_split_m_sets_including_current_proposition_from_second` - analogus to `get_split_m_sets_including_current_proposition_from_first`. + analogous to `get_split_m_sets_including_current_proposition_from_first`. */ get_split_m_sets_including_current_proposition_from_first( variables, num_included2, num_included1, current_index2, @@ -252,28 +252,35 @@ void LandmarkFactoryHM::get_split_m_sets( } // Get partial assignments of size <= m in the problem. -void LandmarkFactoryHM::get_m_sets( - const VariablesProxy &variables, vector &subsets) { +vector LandmarkFactoryHM::get_m_sets( + const VariablesProxy &variables) { Propositions c; + vector subsets; get_m_sets(variables, 0, 0, c, subsets); + return subsets; } // Get subsets of `superset` with size <= m. 
-void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, - vector &subsets, - const Propositions &superset) { +vector LandmarkFactoryHM::get_m_sets( + const VariablesProxy &variables, const Propositions &superset) { Propositions c; + vector subsets; get_m_sets_of_set(variables, 0, 0, c, subsets, superset); + return subsets; } -// second function to get subsets of size at most m that -// have at least one element in ss1 and same in ss2 -// assume disjoint -void LandmarkFactoryHM::get_split_m_sets( +/* + Get subsets of size <= m such that at least one element from `superset1` and + at least one element from `superset2` are included, except if a sets is empty. + We assume `superset1` and `superset2` are disjoint. + TODO: Assert that supersets are disjoint. Should the variables occurring in + the sets be disjoint, rather than their propositions? +*/ +vector LandmarkFactoryHM::get_split_m_sets( const VariablesProxy &variables, - vector &subsets, const Propositions &superset1, const Propositions &superset2) { Propositions c; + vector subsets; // If a set is empty, we do not have to include from it. TODO: Why not? if (superset1.empty()) { get_m_sets_of_set(variables, 0, 0, c, subsets, superset2); @@ -282,17 +289,18 @@ void LandmarkFactoryHM::get_split_m_sets( } else { get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); } + return subsets; } -// get subsets of state with size <= m -void LandmarkFactoryHM::get_m_sets(const VariablesProxy &variables, - vector &subsets, - const State &state) { - Propositions state_proposition; +// Get subsets of the propositions true in `state` with size <= m. 
+vector LandmarkFactoryHM::get_m_sets( + const VariablesProxy &variables, const State &state) { + Propositions state_propositions; + state_propositions.reserve(state.size()); for (FactProxy fact : state) { - state_proposition.push_back(fact.get_pair()); + state_propositions.push_back(fact.get_pair()); } - get_m_sets(variables, subsets, state_proposition); + return get_m_sets(variables, state_propositions); } void LandmarkFactoryHM::print_proposition(const VariablesProxy &variables, const FactPair &proposition) const { @@ -456,8 +464,6 @@ bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, // make the operators of the P_m problem void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { Propositions pc, eff; - vector pc_subsets, eff_subsets, noop_pc_subsets, noop_eff_subsets; - static int op_count = 0; int set_index, noop_index; @@ -475,12 +481,9 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { PiMOperator &pm_op = pm_operators[op.get_id()]; pm_op.index = op_count++; - pc_subsets.clear(); - eff_subsets.clear(); - // preconditions of P_m op are all subsets of original pc pc = get_operator_precondition(op); - get_m_sets(variables, pc_subsets, pc); + vector pc_subsets = get_m_sets(variables, pc); pm_op.precondition.reserve(pc_subsets.size()); // set unsatisfied pc count for op @@ -495,7 +498,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { // same for effects eff = get_operator_postcondition(variables.size(), op); - get_m_sets(variables, eff_subsets, eff); + vector eff_subsets = get_m_sets(variables, eff); pm_op.effect.reserve(eff_subsets.size()); for (const Propositions &eff_subset : eff_subsets) { @@ -519,14 +522,13 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { vector &this_cond_noop = pm_op.conditional_noops.back(); - noop_pc_subsets.clear(); - noop_eff_subsets.clear(); - // get the subsets that have >= 1 element in the pc (unless pc is 
empty) // and >= 1 element in the other set - get_split_m_sets(variables, noop_pc_subsets, pc, it->first); - get_split_m_sets(variables, noop_eff_subsets, eff, it->first); + vector noop_pc_subsets = + get_split_m_sets(variables, pc, it->first); + vector noop_eff_subsets = + get_split_m_sets(variables, eff, it->first); this_cond_noop.reserve(noop_pc_subsets.size() + noop_eff_subsets.size() + 1); @@ -589,8 +591,8 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } // Get all the m or less size subsets in the domain. - vector> msets; - get_m_sets(task_proxy.get_variables(), msets); + vector> msets = + get_m_sets(task_proxy.get_variables()); // map each set to an integer for (size_t i = 0; i < msets.size(); ++i) { @@ -732,8 +734,8 @@ void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { // get subsets of initial state - vector init_subsets; - get_m_sets(task_proxy.get_variables(), init_subsets, task_proxy.get_initial_state()); + vector init_subsets = + get_m_sets(task_proxy.get_variables(), task_proxy.get_initial_state()); TriggerSet current_trigger, next_trigger; @@ -941,10 +943,9 @@ void LandmarkFactoryHM::generate_landmarks( initialize(task_proxy); compute_hm_landmarks(task_proxy); // now construct landmarks graph - vector goal_subsets; Propositions goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); - get_m_sets(variables, goal_subsets, goals); + vector goal_subsets = get_m_sets(variables, goals); list all_landmarks; for (const Propositions &goal_subset : goal_subsets) { assert(set_indices.find(goal_subset) != set_indices.end()); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 16f701d6f4..7fe5e841e1 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ 
b/src/search/landmarks/landmark_factory_h_m.h @@ -153,18 +153,16 @@ class LandmarkFactoryHM : public LandmarkFactory { Propositions ¤t, std::vector &subsets, const Propositions &superset1, const Propositions &superset2); - void get_m_sets(const VariablesProxy &variables, - std::vector &subsets); + std::vector get_m_sets(const VariablesProxy &variables); - void get_m_sets( - const VariablesProxy &variables, std::vector &subsets, - const Propositions &superset); + std::vector get_m_sets( + const VariablesProxy &variables, const Propositions &superset); - void get_m_sets(const VariablesProxy &variables, - std::vector &subsets, const State &state); + std::vector get_m_sets( + const VariablesProxy &variables, const State &state); - void get_split_m_sets( - const VariablesProxy &variables, std::vector &subsets, + std::vector get_split_m_sets( + const VariablesProxy &variables, const Propositions &superset1, const Propositions &superset2); void print_proposition( From 3fa945fafeb007358240631fc10f4d4d24b92d56 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 01:51:05 +0100 Subject: [PATCH 033/108] Refactor printing of hm-factory. 
--- src/search/landmarks/landmark_factory_h_m.cc | 207 +++++++++++-------- src/search/landmarks/landmark_factory_h_m.h | 55 +++-- 2 files changed, 159 insertions(+), 103 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 206d633e23..692cccf72f 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -303,134 +303,177 @@ vector LandmarkFactoryHM::get_m_sets( return get_m_sets(variables, state_propositions); } -void LandmarkFactoryHM::print_proposition(const VariablesProxy &variables, const FactPair &proposition) const { +void LandmarkFactoryHM::print_proposition( + const VariablesProxy &variables, const FactPair &proposition) const { if (log.is_at_least_verbose()) { VariableProxy var = variables[proposition.var]; - FactProxy fact = var.get_fact(proposition.value); - log << fact.get_name() - << " (" << var.get_name() << "(" << fact.get_variable().get_id() << ")" - << "->" << fact.get_value() << ")"; + FactProxy atom = var.get_fact(proposition.value); + log << atom.get_name() << " (" + << var.get_name() << "(" << atom.get_variable().get_id() << ")" + << "->" << atom.get_value() << ")"; } } static Propositions get_operator_precondition(const OperatorProxy &op) { - Propositions preconditions = task_properties::get_fact_pairs(op.get_preconditions()); + Propositions preconditions = + task_properties::get_fact_pairs(op.get_preconditions()); sort(preconditions.begin(), preconditions.end()); return preconditions; } -// get facts that are always true after the operator application -// (effects plus prevail conditions) -static Propositions get_operator_postcondition(int num_vars, const OperatorProxy &op) { +/* Get atoms that are always true after the application of `op` + (effects plus prevail conditions). 
*/ +static Propositions get_operator_postcondition( + int num_vars, const OperatorProxy &op) { Propositions postconditions; EffectsProxy effects = op.get_effects(); vector has_effect_on_var(num_vars, false); for (EffectProxy effect : effects) { - FactProxy effect_fact = effect.get_fact(); - postconditions.push_back(effect_fact.get_pair()); - has_effect_on_var[effect_fact.get_variable().get_id()] = true; + FactPair atom = effect.get_fact().get_pair(); + postconditions.push_back(atom); + has_effect_on_var[atom.var] = true; } for (FactProxy precondition : op.get_preconditions()) { - if (!has_effect_on_var[precondition.get_variable().get_id()]) + if (!has_effect_on_var[precondition.get_variable().get_id()]) { postconditions.push_back(precondition.get_pair()); + } } sort(postconditions.begin(), postconditions.end()); return postconditions; } +static set get_as_set( + const vector &collection, const vector &hm_table) { + set preconditions; + for (int element : collection) { + for (const FactPair &proposition : hm_table[element].propositions) { + preconditions.insert(proposition); + } + } + return preconditions; +} -void LandmarkFactoryHM::print_pm_operator(const VariablesProxy &variables, const PiMOperator &op) const { +void LandmarkFactoryHM::print_pm_operator( + const VariablesProxy &variables, const PiMOperator &op) const { if (log.is_at_least_verbose()) { - set pcs, effs, cond_pc, cond_eff; - vector, set>> conds; - - for (int pc : op.precondition) { - for (const FactPair &proposition : hm_table[pc].propositions) { - pcs.insert(proposition); - } - } - for (int eff : op.effect) { - for (const FactPair &proposition : hm_table[eff].propositions) { - effs.insert(proposition); - } + vector, set>> conditions; + for (const auto &conditional_noop : op.conditional_noops) { + print_conditional_noop(variables, conditional_noop, conditions); } - for (size_t i = 0; i < op.conditional_noops.size(); ++i) { - cond_pc.clear(); - cond_eff.clear(); - int pm_proposition; - size_t j; - 
log << "PC:" << endl; - for (j = 0; (pm_proposition = op.conditional_noops[i][j]) != -1; ++j) { - print_proposition_set(variables, hm_table[pm_proposition].propositions); - log << endl; + print_action(variables, op, conditions); + } +} - for (size_t k = 0; k < hm_table[pm_proposition].propositions.size(); ++k) { - cond_pc.insert(hm_table[pm_proposition].propositions[k]); - } - } - // advance to effects section - log << endl; - ++j; +static pair, vector> split_conditional_noop( + const vector &conditional_noop) { + vector effect_condition; + effect_condition.reserve(conditional_noop.size()); + size_t i; + for (i = 0; conditional_noop[i] != -1; ++i) { + effect_condition.push_back(conditional_noop[i]); + } - log << "EFF:" << endl; - for (; j < op.conditional_noops[i].size(); ++j) { - int pm_proposition = op.conditional_noops[i][j]; + ++i; // Skip delimiter -1. - print_proposition_set(variables, hm_table[pm_proposition].propositions); - log << endl; + vector effect; + effect.reserve(conditional_noop.size()); + for (; i < conditional_noop.size(); ++i) { + effect.push_back(conditional_noop[i]); + } + return {effect_condition, effect}; +} - for (size_t k = 0; k < hm_table[pm_proposition].propositions.size(); ++k) { - cond_eff.insert(hm_table[pm_proposition].propositions[k]); - } - } - conds.emplace_back(cond_pc, cond_eff); - log << endl << endl << endl; - } +void LandmarkFactoryHM::print_conditional_noop( + const VariablesProxy &variables, const vector &conditional_noop, + vector, set>> &conditions) const { + auto [effect_condition, effect] = split_conditional_noop(conditional_noop); + set effect_condition_set = + print_effect_condition(variables, effect_condition); + set effect_set = print_conditional_effect(variables, effect); + conditions.emplace_back(effect_condition_set, effect_set); + log << endl << endl << endl; +} - log << "Action " << op.index << endl; - log << "Precondition: "; - for (const FactPair &pc : pcs) { - print_proposition(variables, pc); +void 
LandmarkFactoryHM::print_proposition_set( + const VariablesProxy &variables, const Propositions &propositions) const { + if (log.is_at_least_verbose()) { + log << "( "; + for (const FactPair &fact : propositions) { + print_proposition(variables, fact); log << " "; } + log << ")"; + } +} - log << endl << "Effect: "; - for (const FactPair &eff : effs) { - print_proposition(variables, eff); - log << " "; +set LandmarkFactoryHM::print_effect_condition( + const VariablesProxy &variables, const vector &effect_conditions) const { + set effect_condition_set; + log << "effect conditions:\n"; + for (int effect_condition : effect_conditions) { + print_proposition_set( + variables, hm_table[effect_condition].propositions); + log << endl; + for (auto proposition : hm_table[effect_condition].propositions) { + effect_condition_set.insert(proposition); } - log << endl << "Conditionals: " << endl; - int i = 0; - for (const auto &cond : conds) { - log << "Cond PC #" << i++ << ":" << endl << "\t"; - for (const FactPair &pc : cond.first) { - print_proposition(variables, pc); - log << " "; - } - log << endl << "Cond Effect #" << i << ":" << endl << "\t"; - for (const FactPair &eff : cond.second) { - print_proposition(variables, eff); - log << " "; - } - log << endl << endl; + } + return effect_condition_set; +} + +set LandmarkFactoryHM::print_conditional_effect( + const VariablesProxy &variables, const vector &effect) const { + set effect_set; + log << "effect:\n"; + for (int eff : effect) { + print_proposition_set(variables, hm_table[eff].propositions); + log << endl; + for (auto proposition : hm_table[eff].propositions) { + effect_set.insert(proposition); } } + return effect_set; } -void LandmarkFactoryHM::print_proposition_set(const VariablesProxy &variables, const Propositions &fs) const { - if (log.is_at_least_verbose()) { - log << "( "; - for (const FactPair &fact : fs) { - print_proposition(variables, fact); +void LandmarkFactoryHM::print_action( + const VariablesProxy 
&variables, const PiMOperator &op, + const std::vector, std::set>> &conditions) const { + log << "Action " << op.index << endl; + log << "Precondition: "; + set preconditions = get_as_set(op.precondition, hm_table); + for (const FactPair &pc : preconditions) { + print_proposition(variables, pc); + log << " "; + } + + log << endl << "Effect: "; + set effects = get_as_set(op.effect, hm_table); + for (const FactPair &eff : effects) { + print_proposition(variables, eff); + log << " "; + } + log << endl << "Conditionals: " << endl; + int i = 0; + for (const auto &condition : conditions) { + log << "Effect Condition #" << i++ << ":\n\t"; + for (const FactPair &cond : condition.first) { + print_proposition(variables, cond); log << " "; } - log << ")"; + log << endl << "Conditional Effect #" << i << ":\n\t"; + for (const FactPair &eff : condition.second) { + print_proposition(variables, eff); + log << " "; + } + log << endl << endl; } } +// TODO: Continue from here. + // check whether fs2 is a possible noop set for action with fs1 as effect // sets cannot be 1) defined on same variable, 2) otherwise mutex bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 7fe5e841e1..12b4f9f211 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -30,8 +30,9 @@ struct PropositionSetComparer { struct PiMOperator { std::vector precondition; std::vector effect; - // TODO: Is this still the case? - // pc separated from effect by a value of -1 + /* In each of the inner vectors, the effect conditions are separated from + the effect values by an entry of the value -1. */ + // TODO: Should it stay this way? 
std::vector> conditional_noops; int index; }; @@ -70,6 +71,24 @@ using PropositionSetToIntMap = class LandmarkFactoryHM : public LandmarkFactory { using TriggerSet = std::unordered_map>; + const int m; + const bool conjunctive_landmarks; + const bool use_orders; + + std::unordered_map landmark_node_table; + + std::vector hm_table; + std::vector pm_operators; + // Maps each set of <= m propositions to an int. TODO: What does this int indicate? + PropositionSetToIntMap set_indices; + /* + The number in the first position represents the amount of unsatisfied + preconditions of the operator. The vector of numbers in the second + position represents the amount of unsatisfied preconditions for each + conditional noop operator. + */ + std::vector>> unsatisfied_precondition_count; + virtual void generate_landmarks( const std::shared_ptr &task) override; @@ -104,27 +123,21 @@ class LandmarkFactoryHM : public LandmarkFactory { void free_unneeded_memory(); void print_proposition_set( - const VariablesProxy &variables, const Propositions &fs) const; + const VariablesProxy &variables, const Propositions &propositions) const; void print_pm_operator( const VariablesProxy &variables, const PiMOperator &op) const; - - const int m; - const bool conjunctive_landmarks; - const bool use_orders; - - std::unordered_map landmark_node_table; - - std::vector hm_table; - std::vector pm_operators; - // Maps each set of >> unsatisfied_precondition_count; + void print_conditional_noop( + const VariablesProxy &variables, + const std::vector &conditional_noop, + std::vector, std::set>> &conditions) const; + std::set print_effect_condition( + const VariablesProxy &variables, + const std::vector &effect_condition) const; + std::set print_conditional_effect( + const VariablesProxy &variables, const std::vector &effect) const; + void print_action( + const VariablesProxy &variables, const PiMOperator &op, + const std::vector, std::set>> &conditions) const; void get_m_sets_including_current_var( const 
VariablesProxy &variables, int num_included, int current_var, From 11de2f8e45818f43a8b3740a910723af7cbfcc7b Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 11:23:57 +0100 Subject: [PATCH 034/108] Turn function static. --- src/search/landmarks/landmark_factory_h_m.cc | 43 +++++++++----------- src/search/landmarks/landmark_factory_h_m.h | 3 -- 2 files changed, 20 insertions(+), 26 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 692cccf72f..fc67032ea0 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -472,39 +472,35 @@ void LandmarkFactoryHM::print_action( } } -// TODO: Continue from here. - -// check whether fs2 is a possible noop set for action with fs1 as effect -// sets cannot be 1) defined on same variable, 2) otherwise mutex -bool LandmarkFactoryHM::possible_noop_set(const VariablesProxy &variables, - const Propositions &propositions1, - const Propositions &propositions2) { - Propositions::const_iterator fs1it = propositions1.begin(), fs2it = propositions2.begin(); - - while (fs1it != propositions1.end() && fs2it != propositions2.end()) { - if (fs1it->var == fs2it->var) { +static bool proposition_set_variables_disjoint( + const Propositions &propositions1, const Propositions &propositions2) { + auto it1 = propositions1.begin(); + auto it2 = propositions2.begin(); + while (it1 != propositions1.end() && it2 != propositions2.end()) { + if (it1->var == it2->var) { return false; - } else if (fs1it->var < fs2it->var) { - ++fs1it; + } else if (it1->var < it2->var) { + ++it1; } else { - ++fs2it; + ++it2; } } + return true; +} - for (const FactPair &proposition1 : propositions1) { - FactProxy fact1 = variables[proposition1.var].get_fact(proposition1.value); - for (const FactPair &proposition2 : propositions2) { - if (fact1.is_mutex( - variables[proposition2.var].get_fact(proposition2.value))) +static bool 
proposition_sets_are_mutex( + const VariablesProxy &variables, const Propositions &propositions1, + const Propositions &propositions2) { + for (const FactPair &atom1 : propositions1) { + for (const FactPair &atom2 : propositions2) { + if (are_mutex(variables, atom1, atom2)) { return false; + } } } - return true; } - -// make the operators of the P_m problem void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { Propositions pc, eff; static int op_count = 0; @@ -559,7 +555,8 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { PropositionSetToIntMap::const_iterator it = set_indices.begin(); while (static_cast(it->first.size()) < m && it != set_indices.end()) { - if (possible_noop_set(variables, eff, it->first)) { + if (proposition_set_variables_disjoint(eff, it->first) && + proposition_sets_are_mutex(variables, eff, it->first)) { // for each such set, add a "conditional effect" to the operator pm_op.conditional_noops.resize(pm_op.conditional_noops.size() + 1); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 12b4f9f211..2c30aacd35 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -102,9 +102,6 @@ class LandmarkFactoryHM : public LandmarkFactory { void propagate_pm_atoms(int atom_index, bool newly_discovered, TriggerSet &trigger); - bool possible_noop_set(const VariablesProxy &variables, - const Propositions &propositions1, - const Propositions &propositions2); void build_pm_operators(const TaskProxy &task_proxy); // TODO: What is interesting? bool interesting(const VariablesProxy &variables, From 444f205f808ab1d3bda8311f64160b76207a1136 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 15:14:34 +0100 Subject: [PATCH 035/108] Break apart function to build Pi^m operators. 
--- src/search/landmarks/landmark_factory_h_m.cc | 226 +++++++++++-------- src/search/landmarks/landmark_factory_h_m.h | 20 +- 2 files changed, 144 insertions(+), 102 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index fc67032ea0..0a9433c592 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -11,6 +11,7 @@ #include "../utils/logging.h" #include "../utils/system.h" +#include #include using namespace std; @@ -501,106 +502,135 @@ static bool proposition_sets_are_mutex( return true; } -void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { - Propositions pc, eff; - static int op_count = 0; - int set_index, noop_index; - - OperatorsProxy operators = task_proxy.get_operators(); - pm_operators.resize(operators.size()); +Propositions LandmarkFactoryHM::initialize_preconditions( + const VariablesProxy &variables, const OperatorProxy &op, + PiMOperator &pm_op) { + /* All subsets of the original precondition are preconditions of the + P_m operator. */ + Propositions precondition = get_operator_precondition(op); + vector subsets = get_m_sets(variables, precondition); + pm_op.precondition.reserve(subsets.size()); + + num_unsatisfied_preconditions[op.get_id()].first = + static_cast(subsets.size()); + + for (const Propositions &subset : subsets) { + assert(set_indices.contains(subset)); + int set_index = set_indices[subset]; + pm_op.precondition.push_back(set_index); + // TODO: Do not abuse FactPair here!!! 
+ hm_table[set_index].pc_for.emplace_back(op.get_id(), -1); + } + return precondition; +} - // set unsatisfied precondition counts, used in fixpoint calculation - unsatisfied_precondition_count.resize(operators.size()); +Propositions LandmarkFactoryHM::initialize_postconditions( + const VariablesProxy &variables, const OperatorProxy &op, + PiMOperator &pm_op) { + Propositions postcondition = get_operator_postcondition( + static_cast(variables.size()), op); + vector subsets = get_m_sets(variables, postcondition); + pm_op.effect.reserve(subsets.size()); + + for (const Propositions &subset : subsets) { + assert(set_indices.contains(subset)); + int set_index = set_indices[subset]; + pm_op.effect.push_back(set_index); + } + return postcondition; +} - VariablesProxy variables = task_proxy.get_variables(); +void LandmarkFactoryHM::add_conditional_noop( + PiMOperator &pm_op, int op_id, + const VariablesProxy &variables, const Propositions &propositions, + const Propositions &preconditions, const Propositions &postconditions) { + int noop_index = static_cast(pm_op.conditional_noops.size()); + + /* + Get the subsets that have >= 1 element in the precondition (unless + the precondition is empty) or the postcondition and >= 1 element + in the `propositions` set. + */ + vector noop_precondition_subsets = + get_split_m_sets(variables, preconditions, propositions); + vector noop_postconditions_subsets = + get_split_m_sets(variables, postconditions, propositions); + + vector conditional_noop; + conditional_noop.reserve(noop_precondition_subsets.size() + + noop_postconditions_subsets.size() + 1); + num_unsatisfied_preconditions[op_id].second.push_back( + static_cast(noop_precondition_subsets.size())); + + // Add the conditional noop preconditions. 
+ for (const auto & subset : noop_precondition_subsets) { + assert(static_cast(subset.size()) <= m); + assert(set_indices.contains(subset)); + int set_index = set_indices[subset]; + conditional_noop.push_back(set_index); + // These propositions are "conditional preconditions" for this operator. + hm_table[set_index].pc_for.emplace_back(op_id, noop_index); + } - // transfer ops from original problem - // represent noops as "conditional" effects - for (OperatorProxy op : operators) { - PiMOperator &pm_op = pm_operators[op.get_id()]; - pm_op.index = op_count++; + // Separate conditional preconditions from conditional effects by number -1. + conditional_noop.push_back(-1); - // preconditions of P_m op are all subsets of original pc - pc = get_operator_precondition(op); - vector pc_subsets = get_m_sets(variables, pc); - pm_op.precondition.reserve(pc_subsets.size()); + // Add the conditional noop effects. + for (const auto & subset : noop_postconditions_subsets) { + assert(static_cast(subset.size()) <= m); + assert(set_indices.contains(subset)); + int set_index = set_indices[subset]; + conditional_noop.push_back(set_index); + } - // set unsatisfied pc count for op - unsatisfied_precondition_count[op.get_id()].first = pc_subsets.size(); + pm_op.conditional_noops.push_back(move(conditional_noop)); +} - for (const Propositions &pc_subset : pc_subsets) { - assert(set_indices.find(pc_subset) != set_indices.end()); - set_index = set_indices[pc_subset]; - pm_op.precondition.push_back(set_index); - hm_table[set_index].pc_for.emplace_back(op.get_id(), -1); +void LandmarkFactoryHM::initialize_noops( + const VariablesProxy &variables, PiMOperator &pm_op, int op_id, + const Propositions &preconditions, const Propositions &postconditions) { + pm_op.conditional_noops.reserve(set_indices.size()); + /* + For all subsets used in the problem with size *<* m, check whether + they conflict with the postcondition of the operator. 
(No need to + check the precondition because variables appearing in the precondition + also appear in the postcondition.) + */ + for (const Propositions &propositions : views::keys(set_indices)) { + if (static_cast(propositions.size()) >= m) { + break; } - - // same for effects - eff = get_operator_postcondition(variables.size(), op); - vector eff_subsets = get_m_sets(variables, eff); - pm_op.effect.reserve(eff_subsets.size()); - - for (const Propositions &eff_subset : eff_subsets) { - assert(set_indices.find(eff_subset) != set_indices.end()); - set_index = set_indices[eff_subset]; - pm_op.effect.push_back(set_index); + if (proposition_set_variables_disjoint(postconditions, propositions) + && proposition_sets_are_mutex(variables, postconditions, + propositions)) { + // For each such set, add a "conditional effect" to the operator. + add_conditional_noop(pm_op, op_id, variables, + propositions, preconditions, postconditions); } + } +} - noop_index = 0; - - // For all subsets used in the problem with size *<* m, check whether - // they conflict with the effect of the operator (no need to check pc - // because mvvs appearing in pc also appear in effect - - PropositionSetToIntMap::const_iterator it = set_indices.begin(); - while (static_cast(it->first.size()) < m - && it != set_indices.end()) { - if (proposition_set_variables_disjoint(eff, it->first) && - proposition_sets_are_mutex(variables, eff, it->first)) { - // for each such set, add a "conditional effect" to the operator - pm_op.conditional_noops.resize(pm_op.conditional_noops.size() + 1); - - vector &this_cond_noop = pm_op.conditional_noops.back(); - - // get the subsets that have >= 1 element in the pc (unless pc is empty) - // and >= 1 element in the other set - - vector noop_pc_subsets = - get_split_m_sets(variables, pc, it->first); - vector noop_eff_subsets = - get_split_m_sets(variables, eff, it->first); - - this_cond_noop.reserve(noop_pc_subsets.size() + noop_eff_subsets.size() + 1); - - 
unsatisfied_precondition_count[op.get_id()].second.push_back(noop_pc_subsets.size()); - - // push back all noop preconditions - for (size_t j = 0; j < noop_pc_subsets.size(); ++j) { - assert(static_cast(noop_pc_subsets[j].size()) <= m); - assert(set_indices.find(noop_pc_subsets[j]) != set_indices.end()); - - set_index = set_indices[noop_pc_subsets[j]]; - this_cond_noop.push_back(set_index); - // these facts are "conditional pcs" for this action - hm_table[set_index].pc_for.emplace_back(op.get_id(), noop_index); - } - - // separator - this_cond_noop.push_back(-1); - - // and the noop effects - for (size_t j = 0; j < noop_eff_subsets.size(); ++j) { - assert(static_cast(noop_eff_subsets[j].size()) <= m); - assert(set_indices.find(noop_eff_subsets[j]) != set_indices.end()); +void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { + OperatorsProxy operators = task_proxy.get_operators(); + int num_operators = static_cast(operators.size()); + pm_operators.resize(num_operators); + num_unsatisfied_preconditions.resize(num_operators); - set_index = set_indices[noop_eff_subsets[j]]; - this_cond_noop.push_back(set_index); - } + VariablesProxy variables = task_proxy.get_variables(); - ++noop_index; - } - ++it; - } + /* Transfer operators from original problem. + Represent noops as conditional effects. 
*/ + for (int i = 0; i < num_operators; ++i) { + const OperatorProxy &op = operators[i]; + PiMOperator &pm_op = pm_operators[op.get_id()]; + pm_op.index = i; + + Propositions preconditions = + initialize_preconditions(variables, op, pm_op); + Propositions postconditions = + initialize_postconditions(variables, op, pm_op); + initialize_noops( + variables, pm_op, op.get_id(), preconditions, postconditions); print_pm_operator(variables, pm_op); } } @@ -729,7 +759,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { void LandmarkFactoryHM::free_unneeded_memory() { utils::release_vector_memory(hm_table); utils::release_vector_memory(pm_operators); - utils::release_vector_memory(unsatisfied_precondition_count); + utils::release_vector_memory(num_unsatisfied_preconditions); set_indices.clear(); landmark_node_table.clear(); @@ -745,10 +775,10 @@ void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered // a pc for the action itself if (info.value == -1) { if (newly_discovered) { - --unsatisfied_precondition_count[info.var].first; + --num_unsatisfied_preconditions[info.var].first; } // add to queue if unsatcount at 0 - if (unsatisfied_precondition_count[info.var].first == 0) { + if (num_unsatisfied_preconditions[info.var].first == 0) { // create empty set or clear prev entries -- signals do all possible noop effects trigger[info.var].clear(); } @@ -756,12 +786,12 @@ void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered // a pc for a conditional noop else { if (newly_discovered) { - --unsatisfied_precondition_count[info.var].second[info.value]; + --num_unsatisfied_preconditions[info.var].second[info.value]; } // if associated action is applicable, and effect has become applicable // (if associated action is not applicable, all noops will be used when it first does) - if ((unsatisfied_precondition_count[info.var].first == 0) && - (unsatisfied_precondition_count[info.var].second[info.value] == 0)) { + if 
((num_unsatisfied_preconditions[info.var].first == 0) && + (num_unsatisfied_preconditions[info.var].second[info.value] == 0)) { // if not already triggering all noops, add this one if ((trigger.find(info.var) == trigger.end()) || (!trigger[info.var].empty())) { @@ -790,7 +820,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { // mark actions with no precondition to be applied for (size_t i = 0; i < pm_operators.size(); ++i) { - if (unsatisfied_precondition_count[i].first == 0) { + if (num_unsatisfied_preconditions[i].first == 0) { // create empty set or clear prev entries current_trigger[i].clear(); } @@ -862,7 +892,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { for (size_t i = 0; i < action.conditional_noops.size(); ++i) { // actions pcs are satisfied, but cond. effects may still have // unsatisfied pcs - if (unsatisfied_precondition_count[op_index].second[i] == 0) { + if (num_unsatisfied_preconditions[op_index].second[i] == 0) { compute_noop_landmarks(op_index, i, local_landmarks, local_necessary, @@ -875,7 +905,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { else { for (set::iterator noop_it = op_it->second.begin(); noop_it != op_it->second.end(); ++noop_it) { - assert(unsatisfied_precondition_count[op_index].second[*noop_it] == 0); + assert(num_unsatisfied_preconditions[op_index].second[*noop_it] == 0); compute_noop_landmarks(op_index, *noop_it, local_landmarks, diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 2c30aacd35..bbf7f7844c 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -32,7 +32,6 @@ struct PiMOperator { std::vector effect; /* In each of the inner vectors, the effect conditions are separated from the effect values by an entry of the value -1. */ - // TODO: Should it stay this way? 
std::vector> conditional_noops; int index; }; @@ -79,15 +78,15 @@ class LandmarkFactoryHM : public LandmarkFactory { std::vector hm_table; std::vector pm_operators; - // Maps each set of <= m propositions to an int. TODO: What does this int indicate? + // Maps each set of < m propositions to an int. TODO: What does this int indicate? PropositionSetToIntMap set_indices; /* The number in the first position represents the amount of unsatisfied preconditions of the operator. The vector of numbers in the second position represents the amount of unsatisfied preconditions for each conditional noop operator. - */ - std::vector>> unsatisfied_precondition_count; + */ + std::vector>> num_unsatisfied_preconditions; virtual void generate_landmarks( const std::shared_ptr &task) override; @@ -102,6 +101,19 @@ class LandmarkFactoryHM : public LandmarkFactory { void propagate_pm_atoms(int atom_index, bool newly_discovered, TriggerSet &trigger); + Propositions initialize_preconditions( + const VariablesProxy &variables, const OperatorProxy &op, + PiMOperator &pm_op); + Propositions initialize_postconditions( + const VariablesProxy &variables, const OperatorProxy &op, + PiMOperator &pm_op); + void add_conditional_noop( + PiMOperator &pm_op, int op_id, + const VariablesProxy &variables, const Propositions &propositions, + const Propositions &preconditions, const Propositions &postconditions); + void initialize_noops( + const VariablesProxy &variables, PiMOperator &pm_op, int op_id, + const Propositions &preconditions, const Propositions &postconditions); void build_pm_operators(const TaskProxy &task_proxy); // TODO: What is interesting? bool interesting(const VariablesProxy &variables, From 55c64f6e985565db677b7e0f0568dcb070336c10 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 12 Mar 2025 19:44:06 +0100 Subject: [PATCH 036/108] Clarify hm initialization. 
--- src/search/landmarks/landmark_factory_h_m.cc | 31 ++++++++------------ src/search/landmarks/landmark_factory_h_m.h | 16 ++++------ 2 files changed, 18 insertions(+), 29 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 0a9433c592..f1c70cb7d3 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -635,14 +635,6 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { } } -// TODO: Replace usages of `interesting` with `are_mutex` above. -bool LandmarkFactoryHM::interesting(const VariablesProxy &variables, - const FactPair &fact1, const FactPair &fact2) const { - // mutexes can always be safely pruned - return !variables[fact1.var].get_fact(fact1.value).is_mutex( - variables[fact2.var].get_fact(fact2.value)); -} - LandmarkFactoryHM::LandmarkFactoryHM( int m, bool conjunctive_landmarks, bool use_orders, utils::Verbosity verbosity) @@ -652,6 +644,17 @@ LandmarkFactoryHM::LandmarkFactoryHM( use_orders(use_orders) { } +void LandmarkFactoryHM::initialize_hm_table(const VariablesProxy &variables) { + // Get all sets of size m or less in the problem. + vector> msets = get_m_sets(variables); + + // Map each set to an integer. + for (int i = 0; i < static_cast(msets.size()); ++i) { + set_indices[msets[i]] = i; + hm_table.emplace_back(move(msets[i])); + } +} + void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { if (log.is_at_least_normal()) { log << "h^m landmarks m=" << m << endl; @@ -660,20 +663,10 @@ void LandmarkFactoryHM::initialize(const TaskProxy &task_proxy) { cerr << "h^m landmarks don't support axioms" << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } - // Get all the m or less size subsets in the domain. 
- vector> msets = - get_m_sets(task_proxy.get_variables()); - - // map each set to an integer - for (size_t i = 0; i < msets.size(); ++i) { - hm_table.emplace_back(); - set_indices[msets[i]] = i; - hm_table[i].propositions = msets[i]; - } + initialize_hm_table(task_proxy.get_variables()); if (log.is_at_least_normal()) { log << "Using " << hm_table.size() << " P^m propositions." << endl; } - build_pm_operators(task_proxy); } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index bbf7f7844c..469e1cbb14 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -39,9 +39,9 @@ struct PiMOperator { // represents a proposition in the P^m problem struct HMEntry { // Propositions that belong to this set. - Propositions propositions; - // -1 -> current cost infinite - // 0 -> present in initial state + const Propositions propositions; + // Level -1: current cost infinite + // Level 0: present in initial state int level; // TODO: Can we replace the `list` data type? @@ -58,9 +58,8 @@ struct HMEntry { -1 for op itself */ std::vector pc_for; - // TODO: Maybe set the propositions in the constructor as well? - HMEntry() - : level(-1) { + explicit HMEntry(Propositions &&propositions) + : propositions(move(propositions)), level(-1) { } }; @@ -115,10 +114,6 @@ class LandmarkFactoryHM : public LandmarkFactory { const VariablesProxy &variables, PiMOperator &pm_op, int op_id, const Propositions &preconditions, const Propositions &postconditions); void build_pm_operators(const TaskProxy &task_proxy); - // TODO: What is interesting? 
- bool interesting(const VariablesProxy &variables, - const FactPair &fact1, - const FactPair &fact2) const; void postprocess(const TaskProxy &task_proxy); @@ -128,6 +123,7 @@ class LandmarkFactoryHM : public LandmarkFactory { void add_landmark_node(int set_index, bool goal = false); + void initialize_hm_table(const VariablesProxy &variables); void initialize(const TaskProxy &task_proxy); void free_unneeded_memory(); From 82e21b183d6e1df5b2bd2fc0e063c397933aed42 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 13 Mar 2025 13:41:14 +0100 Subject: [PATCH 037/108] Clarify approximation of possible achievers. --- src/search/landmarks/landmark_factory_h_m.cc | 138 ++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 3 + 2 files changed, 73 insertions(+), 68 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f1c70cb7d3..6acf0cf6ba 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -346,15 +346,15 @@ static Propositions get_operator_postcondition( return postconditions; } -static set get_as_set( +static set get_propositions( const vector &collection, const vector &hm_table) { - set preconditions; + set propositions; for (int element : collection) { for (const FactPair &proposition : hm_table[element].propositions) { - preconditions.insert(proposition); + propositions.insert(proposition); } } - return preconditions; + return propositions; } void LandmarkFactoryHM::print_pm_operator( @@ -444,29 +444,29 @@ void LandmarkFactoryHM::print_action( const std::vector, std::set>> &conditions) const { log << "Action " << op.index << endl; log << "Precondition: "; - set preconditions = get_as_set(op.precondition, hm_table); - for (const FactPair &pc : preconditions) { - print_proposition(variables, pc); + set preconditions = get_propositions(op.precondition, hm_table); + for (const FactPair &precondition : preconditions) { + 
print_proposition(variables, precondition); log << " "; } log << endl << "Effect: "; - set effects = get_as_set(op.effect, hm_table); - for (const FactPair &eff : effects) { - print_proposition(variables, eff); + set effects = get_propositions(op.effect, hm_table); + for (const FactPair &effect : effects) { + print_proposition(variables, effect); log << " "; } log << endl << "Conditionals: " << endl; int i = 0; - for (const auto &condition : conditions) { + for (const auto &[effect_conditions, effects] : conditions) { log << "Effect Condition #" << i++ << ":\n\t"; - for (const FactPair &cond : condition.first) { - print_proposition(variables, cond); + for (const FactPair &condition : effect_conditions) { + print_proposition(variables, condition); log << " "; } log << endl << "Conditional Effect #" << i << ":\n\t"; - for (const FactPair &eff : condition.second) { - print_proposition(variables, eff); + for (const FactPair &effect : effects) { + print_proposition(variables, effect); log << " "; } log << endl << endl; @@ -678,13 +678,59 @@ void LandmarkFactoryHM::postprocess(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::discard_conjunctive_landmarks() { - if (landmark_graph->get_num_conjunctive_landmarks() > 0) { - if (log.is_at_least_normal()) { - log << "Discarding " << landmark_graph->get_num_conjunctive_landmarks() - << " conjunctive landmarks" << endl; + if (landmark_graph->get_num_conjunctive_landmarks() == 0) { + return; + } + if (log.is_at_least_normal()) { + log << "Discarding " << landmark_graph->get_num_conjunctive_landmarks() + << " conjunctive landmarks" << endl; + } + landmark_graph->remove_node_if( + [](const LandmarkNode &node) { + return node.get_landmark().is_conjunctive; + }); +} + +static bool operator_can_achieve_landmark( + const OperatorProxy &op, const Landmark &landmark, + const VariablesProxy &variables) { + Propositions precondition = get_operator_precondition(op); + Propositions postcondition = + 
get_operator_postcondition(static_cast(variables.size()), op); + + for (const FactPair &atom : landmark.atoms) { + if (find(postcondition.begin(), postcondition.end(), atom) != + postcondition.end()) { + // `atom` is a postcondition of `op`. + continue; + } + auto is_mutex = [&](const FactPair &other) { + return are_mutex(variables, atom, other); + }; + if (any_of(postcondition.begin(), postcondition.end(), is_mutex) || + /* TODO: Since the precondition is factored into the postcondition, + I don't think we actually need this second `any_of` case. */ + any_of(precondition.begin(), precondition.end(), is_mutex)) { + return false; + } + } + return true; +} + +void LandmarkFactoryHM::approximate_possible_achievers( + Landmark &landmark, const OperatorsProxy &operators, + const VariablesProxy &variables) const { + unordered_set candidates; + for (const FactPair &atom : landmark.atoms) { + const vector &ops = get_operators_including_effect(atom); + candidates.insert(ops.begin(), ops.end()); + } + + for (int op_id : candidates) { + if (operator_can_achieve_landmark( + operators[op_id], landmark, variables)) { + landmark.possible_achievers.insert(op_id); } - landmark_graph->remove_node_if( - [](const LandmarkNode &node) {return node.get_landmark().is_conjunctive;}); } } @@ -696,55 +742,11 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { OperatorsProxy operators = task_proxy.get_operators(); VariablesProxy variables = task_proxy.get_variables(); - // first_achievers are already filled in by compute_h_m_landmarks - // here only have to do possible_achievers + /* The `first_achievers` are already filled in by `compute_h_m_landmarks`, + so here we only have to do `possible_achievers` */ for (const auto &node : *landmark_graph) { Landmark &landmark = node->get_landmark(); - set candidates; - // put all possible adders in candidates set - for (const FactPair &atom : landmark.atoms) { - const vector &ops = get_operators_including_effect(atom); - 
candidates.insert(ops.begin(), ops.end()); - } - - for (int op_id : candidates) { - Propositions post = get_operator_postcondition(variables.size(), operators[op_id]); - Propositions pre = get_operator_precondition(operators[op_id]); - size_t j; - for (j = 0; j < landmark.atoms.size(); ++j) { - const FactPair &atom = landmark.atoms[j]; - // action adds this element of landmark as well - if (find(post.begin(), post.end(), atom) != post.end()) - continue; - bool is_mutex = false; - for (const FactPair &proposition : post) { - if (variables[proposition.var].get_fact(proposition.value).is_mutex( - variables[atom.var].get_fact(atom.value))) { - is_mutex = true; - break; - } - } - if (is_mutex) { - break; - } - for (const FactPair &proposition : pre) { - // we know that lm_val is not added by the operator - // so if it incompatible with the pc, this can't be an achiever - if (variables[proposition.var].get_fact(proposition.value).is_mutex( - variables[atom.var].get_fact(atom.value))) { - is_mutex = true; - break; - } - } - if (is_mutex) { - break; - } - } - if (j == landmark.atoms.size()) { - // not inconsistent with any of the other landmark propositions - landmark.possible_achievers.insert(op_id); - } - } + approximate_possible_achievers(landmark, operators, variables); } achievers_calculated = true; } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 469e1cbb14..48da964bec 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -119,6 +119,9 @@ class LandmarkFactoryHM : public LandmarkFactory { void discard_conjunctive_landmarks(); + void approximate_possible_achievers( + Landmark &landmark, const OperatorsProxy &operators, + const VariablesProxy &variables) const; void calc_achievers(const TaskProxy &task_proxy); void add_landmark_node(int set_index, bool goal = false); From 05fe4c62f150738e20cb5e83df8f0bf1512aa319 Mon Sep 17 00:00:00 2001 From: 
ClemensBuechner Date: Thu, 13 Mar 2025 16:05:14 +0100 Subject: [PATCH 038/108] Break apart pi-m propagation to trigger applicable operator updates. --- src/search/landmarks/landmark_factory_h_m.cc | 99 +++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 21 +++-- 2 files changed, 69 insertions(+), 51 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 6acf0cf6ba..f81ce337af 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -518,8 +518,7 @@ Propositions LandmarkFactoryHM::initialize_preconditions( assert(set_indices.contains(subset)); int set_index = set_indices[subset]; pm_op.precondition.push_back(set_index); - // TODO: Do not abuse FactPair here!!! - hm_table[set_index].pc_for.emplace_back(op.get_id(), -1); + hm_table[set_index].triggered_operators.emplace_back(op.get_id(), -1); } return precondition; } @@ -569,7 +568,7 @@ void LandmarkFactoryHM::add_conditional_noop( int set_index = set_indices[subset]; conditional_noop.push_back(set_index); // These propositions are "conditional preconditions" for this operator. - hm_table[set_index].pc_for.emplace_back(op_id, noop_index); + hm_table[set_index].triggered_operators.emplace_back(op_id, noop_index); } // Separate conditional preconditions from conditional effects by number -1. @@ -701,16 +700,16 @@ static bool operator_can_achieve_landmark( for (const FactPair &atom : landmark.atoms) { if (find(postcondition.begin(), postcondition.end(), atom) != postcondition.end()) { - // `atom` is a postcondition of `op`. + // This `atom` is a postcondition of `op`, move on to the next one. 
continue; } - auto is_mutex = [&](const FactPair &other) { + auto mutex = [&](const FactPair &other) { return are_mutex(variables, atom, other); }; - if (any_of(postcondition.begin(), postcondition.end(), is_mutex) || + if (any_of(postcondition.begin(), postcondition.end(), mutex) || /* TODO: Since the precondition is factored into the postcondition, I don't think we actually need this second `any_of` case. */ - any_of(precondition.begin(), precondition.end(), is_mutex)) { + any_of(precondition.begin(), precondition.end(), mutex)) { return false; } } @@ -760,39 +759,51 @@ void LandmarkFactoryHM::free_unneeded_memory() { landmark_node_table.clear(); } -// called when a fact is discovered or its landmarks change -// to trigger required actions at next level -// newly_discovered = first time fact becomes reachable -void LandmarkFactoryHM::propagate_pm_atoms(int atom_index, bool newly_discovered, - TriggerSet &trigger) { - // for each action/noop for which fact is a pc - for (const FactPair &info : hm_table[atom_index].pc_for) { - // a pc for the action itself - if (info.value == -1) { - if (newly_discovered) { - --num_unsatisfied_preconditions[info.var].first; - } - // add to queue if unsatcount at 0 - if (num_unsatisfied_preconditions[info.var].first == 0) { - // create empty set or clear prev entries -- signals do all possible noop effects - trigger[info.var].clear(); - } +void LandmarkFactoryHM::trigger_operator( + int op_id, bool newly_discovered, TriggerSet &trigger) { + if (newly_discovered) { + --num_unsatisfied_preconditions[op_id].first; + } + if (num_unsatisfied_preconditions[op_id].first == 0) { + /* + Clear trigger for `op_id` (or create entry if it does not yet + exist) to indicate that the precondition of the corresponding + operator is satisfied and all conditional noops are triggered. 
+ */ + trigger[op_id].clear(); + } +} + +void LandmarkFactoryHM::trigger_conditional_noop( + int op_id, int noop_id, bool newly_discovered, TriggerSet &trigger) { + if (newly_discovered) { + --num_unsatisfied_preconditions[op_id].second[noop_id]; + } + /* If the operator is applicable and the effect condition is + satisfied, then the effect is triggered. */ + if (num_unsatisfied_preconditions[op_id].first == 0 && + num_unsatisfied_preconditions[op_id].second[noop_id] == 0) { + /* + The trigger for `op_id` being empty indicates that all noops are + triggered anyway. Testing `contains` first is necessary to not + generate the (empty) entry for `op_id` when using the [] operator. + */ + if (!trigger.contains(op_id) || !trigger[op_id].empty()) { + trigger[op_id].insert(noop_id); } - // a pc for a conditional noop - else { - if (newly_discovered) { - --num_unsatisfied_preconditions[info.var].second[info.value]; - } - // if associated action is applicable, and effect has become applicable - // (if associated action is not applicable, all noops will be used when it first does) - if ((num_unsatisfied_preconditions[info.var].first == 0) && - (num_unsatisfied_preconditions[info.var].second[info.value] == 0)) { - // if not already triggering all noops, add this one - if ((trigger.find(info.var) == trigger.end()) || - (!trigger[info.var].empty())) { - trigger[info.var].insert(info.value); - } - } + } +} + +// Triggers which operators are reevaluated at the next level. +void LandmarkFactoryHM::propagate_pm_propositions( + int proposition_id, bool newly_discovered, TriggerSet &trigger) { + // For each operator/noop for which the proposition is a precondition. + for (auto [op_id, noop_id] : hm_table[proposition_id].triggered_operators) { + if (noop_id == -1) { + // The proposition is a precondition for the operator itself. 
+ trigger_operator(op_id, newly_discovered, trigger); + } else { + trigger_conditional_noop(op_id, noop_id, newly_discovered, trigger); } } } @@ -810,7 +821,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { hm_table[index].level = 0; // set actions to be applied - propagate_pm_atoms(index, true, current_trigger); + propagate_pm_propositions(index, true, current_trigger); } // mark actions with no precondition to be applied @@ -869,7 +880,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { } if (hm_table[*it].landmarks.size() != prev_size) - propagate_pm_atoms(*it, false, next_trigger); + propagate_pm_propositions(*it, false, next_trigger); } else { hm_table[*it].level = level; hm_table[*it].landmarks = local_landmarks; @@ -877,7 +888,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { hm_table[*it].necessary = local_necessary; } insert_into(hm_table[*it].first_achievers, op_index); - propagate_pm_atoms(*it, true, next_trigger); + propagate_pm_propositions(*it, true, next_trigger); } } @@ -975,7 +986,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( } if (hm_table[pm_proposition].landmarks.size() != prev_size) - propagate_pm_atoms(pm_proposition, false, next_trigger); + propagate_pm_propositions(pm_proposition, false, next_trigger); } else { hm_table[pm_proposition].level = level; hm_table[pm_proposition].landmarks = cn_landmarks; @@ -983,7 +994,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( hm_table[pm_proposition].necessary = cn_necessary; } insert_into(hm_table[pm_proposition].first_achievers, op_index); - propagate_pm_atoms(pm_proposition, true, next_trigger); + propagate_pm_propositions(pm_proposition, true, next_trigger); } } } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 48da964bec..606638f5a7 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h 
@@ -52,11 +52,13 @@ struct HMEntry { std::list first_achievers; - /* TODO: What's the meaning of this? Is it actually using a FactPair to - represent something completely unrelated?!? */ - /* First int = op index, second int conditional noop effect - -1 for op itself */ - std::vector pc_for; + /* + The first int represents an operator ID. If the second int is -1 it means + the `propositions` are a precondition of the corresponding operator. If + the second int is >= 0 it points to the respective conditional noop for + which `propositions` occur in the effect condition. + */ + std::vector> triggered_operators; explicit HMEntry(Propositions &&propositions) : propositions(move(propositions)), level(-1) { @@ -85,6 +87,7 @@ class LandmarkFactoryHM : public LandmarkFactory { position represents the amount of unsatisfied preconditions for each conditional noop operator. */ + // TODO: Instead reserve the first entry of the vector for the operator itself. std::vector>> num_unsatisfied_preconditions; virtual void generate_landmarks( @@ -97,8 +100,12 @@ class LandmarkFactoryHM : public LandmarkFactory { int level, TriggerSet &next_trigger); - void propagate_pm_atoms(int atom_index, bool newly_discovered, - TriggerSet &trigger); + void trigger_operator( + int op_id, bool newly_discovered, TriggerSet &trigger); + void trigger_conditional_noop( + int op_id, int noop_id, bool newly_discovered, TriggerSet &trigger); + void propagate_pm_propositions( + int proposition_id, bool newly_discovered, TriggerSet &trigger); Propositions initialize_preconditions( const VariablesProxy &variables, const OperatorProxy &op, From 19aac6fd1e8dffd1e9cbcce6f2ed898a5ff33ed9 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 13 Mar 2025 17:49:53 +0100 Subject: [PATCH 039/108] Break apart computing the landmarks. 
--- src/search/landmarks/landmark_factory_h_m.cc | 221 ++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 24 +- 2 files changed, 129 insertions(+), 116 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f81ce337af..52d9ca1288 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -101,7 +101,7 @@ void insert_into(list &alist, const T &val) { } template -static bool contains(list &alist, const T &val) { +static bool contains(const list &alist, const T &val) { return find(alist.begin(), alist.end(), val) != alist.end(); } @@ -442,7 +442,7 @@ set LandmarkFactoryHM::print_conditional_effect( void LandmarkFactoryHM::print_action( const VariablesProxy &variables, const PiMOperator &op, const std::vector, std::set>> &conditions) const { - log << "Action " << op.index << endl; + log << "Action " << op.id << endl; log << "Precondition: "; set preconditions = get_propositions(op.precondition, hm_table); for (const FactPair &precondition : preconditions) { @@ -622,7 +622,7 @@ void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { for (int i = 0; i < num_operators; ++i) { const OperatorProxy &op = operators[i]; PiMOperator &pm_op = pm_operators[op.get_id()]; - pm_op.index = i; + pm_op.id = i; Propositions preconditions = initialize_preconditions(variables, op, pm_op); @@ -808,125 +808,131 @@ void LandmarkFactoryHM::propagate_pm_propositions( } } -void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { - // get subsets of initial state - vector init_subsets = - get_m_sets(task_proxy.get_variables(), task_proxy.get_initial_state()); - - TriggerSet current_trigger, next_trigger; +LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached( + const State &state, const VariablesProxy &variables) { + vector state_propositions = get_m_sets(variables, state); + TriggerSet triggers; - 
// for all of the initial state <= m subsets, mark level = 0 - for (size_t i = 0; i < init_subsets.size(); ++i) { - int index = set_indices[init_subsets[i]]; + for (const auto &proposition : state_propositions) { + int index = set_indices[proposition]; hm_table[index].level = 0; - - // set actions to be applied - propagate_pm_propositions(index, true, current_trigger); + propagate_pm_propositions(index, true, triggers); } - // mark actions with no precondition to be applied - for (size_t i = 0; i < pm_operators.size(); ++i) { + /* TODO: This should be dealt with already due to the + `propagate_pm_propositions` above, shouldn't it? */ + for (int i = 0; i < static_cast(pm_operators.size()); ++i) { if (num_unsatisfied_preconditions[i].first == 0) { - // create empty set or clear prev entries - current_trigger[i].clear(); + /* + Clear trigger for `op_id` (or create entry if it does not yet + exist) to indicate that the precondition of the corresponding + operator is satisfied and all conditional noops are triggered. + */ + triggers[i].clear(); } } + return triggers; +} - vector::iterator it; - TriggerSet::iterator op_it; - - list local_landmarks; - list local_necessary; - - size_t prev_size; - - int level = 1; - - // while we have actions to apply - while (!current_trigger.empty()) { - for (op_it = current_trigger.begin(); op_it != current_trigger.end(); ++op_it) { - local_landmarks.clear(); - local_necessary.clear(); - - int op_index = op_it->first; - PiMOperator &action = pm_operators[op_index]; +pair, list> LandmarkFactoryHM::collect_precondition_landmarks( + const PiMOperator &op) const { + /* For each proposition, the proposition itself is not stored even though + it is a landmark for itself. */ + // TODO: Can we use something other than lists here? 
+ list landmarks, necessary; + for (int precondition : op.precondition) { + union_with(landmarks, hm_table[precondition].landmarks); + insert_into(landmarks, precondition); - // gather landmarks for pcs - // in the set of landmarks for each fact, the fact itself is not stored - // (only landmarks preceding it) - for (it = action.precondition.begin(); it != action.precondition.end(); ++it) { - union_with(local_landmarks, hm_table[*it].landmarks); - insert_into(local_landmarks, *it); + if (use_orders) { + insert_into(necessary, precondition); + } + } + return {move(landmarks), move(necessary)}; +} +void LandmarkFactoryHM::update_effect_landmarks( + const PiMOperator &op, int level, const list &landmarks, + const list &necessary, TriggerSet &triggers) { + for (int effect : op.effect) { + if (hm_table[effect].level != -1) { + size_t prev_size = hm_table[effect].landmarks.size(); + intersect_with(hm_table[effect].landmarks, landmarks); + + /* + If the effect appears in `landmarks`, the proposition is not + achieved for the first time. No need to intersect for + greedy-necessary orderings or add `op` to the first achievers. 
+ */ + if (!contains(landmarks, effect)) { + insert_into(hm_table[effect].first_achievers, op.id); if (use_orders) { - insert_into(local_necessary, *it); + intersect_with(hm_table[effect].necessary, necessary); } } - for (it = action.effect.begin(); it != action.effect.end(); ++it) { - if (hm_table[*it].level != -1) { - prev_size = hm_table[*it].landmarks.size(); - intersect_with(hm_table[*it].landmarks, local_landmarks); - - // if the add effect appears in local landmarks, - // fact is being achieved for >1st time - // no need to intersect for gn orderings - // or add op to first achievers - if (!contains(local_landmarks, *it)) { - insert_into(hm_table[*it].first_achievers, op_index); - if (use_orders) { - intersect_with(hm_table[*it].necessary, local_necessary); - } - } - - if (hm_table[*it].landmarks.size() != prev_size) - propagate_pm_propositions(*it, false, next_trigger); - } else { - hm_table[*it].level = level; - hm_table[*it].landmarks = local_landmarks; - if (use_orders) { - hm_table[*it].necessary = local_necessary; - } - insert_into(hm_table[*it].first_achievers, op_index); - propagate_pm_propositions(*it, true, next_trigger); - } + if (hm_table[effect].landmarks.size() != prev_size) { + propagate_pm_propositions(effect, false, triggers); } - - // landmarks changed for action itself, have to recompute - // landmarks for all noop effects - if (op_it->second.empty()) { - for (size_t i = 0; i < action.conditional_noops.size(); ++i) { - // actions pcs are satisfied, but cond. 
effects may still have - // unsatisfied pcs - if (num_unsatisfied_preconditions[op_index].second[i] == 0) { - compute_noop_landmarks(op_index, i, - local_landmarks, - local_necessary, - level, next_trigger); - } - } + } else { + hm_table[effect].level = level; + hm_table[effect].landmarks = landmarks; + if (use_orders) { + hm_table[effect].necessary = necessary; } - // only recompute landmarks for conditions whose - // landmarks have changed - else { - for (set::iterator noop_it = op_it->second.begin(); - noop_it != op_it->second.end(); ++noop_it) { - assert(num_unsatisfied_preconditions[op_index].second[*noop_it] == 0); - - compute_noop_landmarks(op_index, *noop_it, - local_landmarks, - local_necessary, - level, next_trigger); - } + insert_into(hm_table[effect].first_achievers, op.id); + propagate_pm_propositions(effect, true, triggers); + } + } +} + +void LandmarkFactoryHM::update_noop_landmarks( + const unordered_set ¤t_triggers, const PiMOperator &op, + int level, const list &landmarks, const list &necessary, + TriggerSet &next_triggers) { + if (current_triggers.empty()) { + /* + The landmarks for the operator have changed, so we have to recompute + the landmarks for all conditional noops if all their effect conditions + are satisfied. + */ + int num_noops = static_cast(op.conditional_noops.size()); + for (int i = 0; i < num_noops; ++i) { + if (num_unsatisfied_preconditions[op.id].second[i] == 0) { + compute_noop_landmarks( + op.id, i, landmarks, necessary, level, next_triggers); } } + } else { + // Only recompute landmarks for conditions whose landmarks have changed. 
+ for (int noop_it : current_triggers) { + assert(num_unsatisfied_preconditions[op.id].second[noop_it] == 0); + compute_noop_landmarks( + op.id, noop_it, landmarks, necessary, level, next_triggers); + } + } +} + +void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { + TriggerSet current_trigger = mark_state_propositions_reached( + task_proxy.get_initial_state(), task_proxy.get_variables()); + TriggerSet next_trigger; + for (int level = 1; !current_trigger.empty(); ++level) { + for (auto &[op_id, triggers] : current_trigger) { + PiMOperator &op = pm_operators[op_id]; + auto [local_landmarks, local_necessary] = + collect_precondition_landmarks(op); + update_effect_landmarks( + op, level, local_landmarks, local_necessary, next_trigger); + update_noop_landmarks(triggers, op, level, local_landmarks, + local_necessary, next_trigger); + } current_trigger.swap(next_trigger); next_trigger.clear(); if (log.is_at_least_verbose()) { log << "Level " << level << " completed." << endl; } - ++level; } if (log.is_at_least_normal()) { log << "h^m landmarks computed." 
<< endl; @@ -934,25 +940,22 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::compute_noop_landmarks( - int op_index, int noop_index, - list const &local_landmarks, - list const &local_necessary, - int level, - TriggerSet &next_trigger) { + int op_id, int noop_index, const list &landmarks, + const list &necessary, int level, TriggerSet &next_trigger) { list cn_necessary, cn_landmarks; size_t prev_size; int pm_proposition; - PiMOperator &action = pm_operators[op_index]; - vector &pc_eff_pair = action.conditional_noops[noop_index]; + PiMOperator &op = pm_operators[op_id]; + vector &pc_eff_pair = op.conditional_noops[noop_index]; cn_landmarks.clear(); - cn_landmarks = local_landmarks; + cn_landmarks = landmarks; if (use_orders) { cn_necessary.clear(); - cn_necessary = local_necessary; + cn_necessary = necessary; } size_t i; @@ -979,7 +982,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( // no need to intersect for gn orderings // or add op to first achievers if (!contains(cn_landmarks, pm_proposition)) { - insert_into(hm_table[pm_proposition].first_achievers, op_index); + insert_into(hm_table[pm_proposition].first_achievers, op_id); if (use_orders) { intersect_with(hm_table[pm_proposition].necessary, cn_necessary); } @@ -993,7 +996,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( if (use_orders) { hm_table[pm_proposition].necessary = cn_necessary; } - insert_into(hm_table[pm_proposition].first_achievers, op_index); + insert_into(hm_table[pm_proposition].first_achievers, op_id); propagate_pm_propositions(pm_proposition, true, next_trigger); } } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 606638f5a7..bd0140778e 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -33,7 +33,7 @@ struct PiMOperator { /* In each of the inner vectors, the effect conditions are separated from the effect values 
by an entry of the value -1. */ std::vector> conditional_noops; - int index; + int id; }; // represents a proposition in the P^m problem @@ -69,7 +69,7 @@ using PropositionSetToIntMap = std::map; class LandmarkFactoryHM : public LandmarkFactory { - using TriggerSet = std::unordered_map>; + using TriggerSet = std::unordered_map>; const int m; const bool conjunctive_landmarks; @@ -93,12 +93,22 @@ class LandmarkFactoryHM : public LandmarkFactory { virtual void generate_landmarks( const std::shared_ptr &task) override; + TriggerSet mark_state_propositions_reached( + const State &state, const VariablesProxy &variables); + std::pair, std::list> collect_precondition_landmarks( + const PiMOperator &op) const; + void update_effect_landmarks( + const PiMOperator &op, int level, const std::list &landmarks, + const std::list &necessary, TriggerSet &triggers); + void update_noop_landmarks( + const std::unordered_set ¤t_triggers, const PiMOperator &op, + int level, const std::list &landmarks, + const std::list &necessary, TriggerSet &next_triggers); void compute_hm_landmarks(const TaskProxy &task_proxy); - void compute_noop_landmarks(int op_index, int noop_index, - std::list const &local_landmarks, - std::list const &local_necessary, - int level, - TriggerSet &next_trigger); + void compute_noop_landmarks( + int op_id, int noop_index, const std::list &local_landmarks, + const std::list &local_necessary, int level, + TriggerSet &next_trigger); void trigger_operator( int op_id, bool newly_discovered, TriggerSet &trigger); From 807441d3001588591fc5dcc4ac3bb8ab180cf66e Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 13 Mar 2025 19:04:26 +0100 Subject: [PATCH 040/108] Clean up computing conditional noop landmarks. 
--- src/search/landmarks/landmark_factory_h_m.cc | 119 ++++++------------- src/search/landmarks/landmark_factory_h_m.h | 11 +- 2 files changed, 44 insertions(+), 86 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 52d9ca1288..4baa744816 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -834,54 +834,52 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached return triggers; } -pair, list> LandmarkFactoryHM::collect_precondition_landmarks( - const PiMOperator &op) const { +void LandmarkFactoryHM::collect_condition_landmarks( + const vector &condition, list &landmarks, + list &necessary) const { /* For each proposition, the proposition itself is not stored even though it is a landmark for itself. */ - // TODO: Can we use something other than lists here? - list landmarks, necessary; - for (int precondition : op.precondition) { - union_with(landmarks, hm_table[precondition].landmarks); - insert_into(landmarks, precondition); + for (int proposition : condition) { + union_with(landmarks, hm_table[proposition].landmarks); + insert_into(landmarks, proposition); if (use_orders) { - insert_into(necessary, precondition); + insert_into(necessary, proposition); } } - return {move(landmarks), move(necessary)}; } void LandmarkFactoryHM::update_effect_landmarks( - const PiMOperator &op, int level, const list &landmarks, + int op_id, const vector &effect, int level, const list &landmarks, const list &necessary, TriggerSet &triggers) { - for (int effect : op.effect) { - if (hm_table[effect].level != -1) { - size_t prev_size = hm_table[effect].landmarks.size(); - intersect_with(hm_table[effect].landmarks, landmarks); + for (int proposition : effect) { + if (hm_table[proposition].level != -1) { + size_t prev_size = hm_table[proposition].landmarks.size(); + intersect_with(hm_table[proposition].landmarks, landmarks); /* If 
the effect appears in `landmarks`, the proposition is not achieved for the first time. No need to intersect for greedy-necessary orderings or add `op` to the first achievers. */ - if (!contains(landmarks, effect)) { - insert_into(hm_table[effect].first_achievers, op.id); + if (!contains(landmarks, proposition)) { + insert_into(hm_table[proposition].first_achievers, op_id); if (use_orders) { - intersect_with(hm_table[effect].necessary, necessary); + intersect_with(hm_table[proposition].necessary, necessary); } } - if (hm_table[effect].landmarks.size() != prev_size) { - propagate_pm_propositions(effect, false, triggers); + if (hm_table[proposition].landmarks.size() != prev_size) { + propagate_pm_propositions(proposition, false, triggers); } } else { - hm_table[effect].level = level; - hm_table[effect].landmarks = landmarks; + hm_table[proposition].level = level; + hm_table[proposition].landmarks = landmarks; if (use_orders) { - hm_table[effect].necessary = necessary; + hm_table[proposition].necessary = necessary; } - insert_into(hm_table[effect].first_achievers, op.id); - propagate_pm_propositions(effect, true, triggers); + insert_into(hm_table[proposition].first_achievers, op_id); + propagate_pm_propositions(proposition, true, triggers); } } } @@ -919,13 +917,14 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet next_trigger; for (int level = 1; !current_trigger.empty(); ++level) { for (auto &[op_id, triggers] : current_trigger) { + list local_landmarks, local_necessary; PiMOperator &op = pm_operators[op_id]; - auto [local_landmarks, local_necessary] = - collect_precondition_landmarks(op); - update_effect_landmarks( - op, level, local_landmarks, local_necessary, next_trigger); + collect_condition_landmarks( + op.precondition, local_landmarks, local_necessary); + update_effect_landmarks(op_id, op.effect, level, local_landmarks, + local_necessary, next_trigger); update_noop_landmarks(triggers, op, level, local_landmarks, - 
local_necessary, next_trigger); + local_necessary, next_trigger); } current_trigger.swap(next_trigger); next_trigger.clear(); @@ -942,64 +941,20 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { void LandmarkFactoryHM::compute_noop_landmarks( int op_id, int noop_index, const list &landmarks, const list &necessary, int level, TriggerSet &next_trigger) { - list cn_necessary, cn_landmarks; - size_t prev_size; - int pm_proposition; - - PiMOperator &op = pm_operators[op_id]; - vector &pc_eff_pair = op.conditional_noops[noop_index]; - - cn_landmarks.clear(); - - cn_landmarks = landmarks; + const vector &conditional_noop = + pm_operators[op_id].conditional_noops[noop_index]; + const auto &[effect_condition, effect] = + split_conditional_noop (conditional_noop); + list cn_landmarks = landmarks; + list cn_necessary; if (use_orders) { - cn_necessary.clear(); cn_necessary = necessary; } - size_t i; - for (i = 0; (pm_proposition = pc_eff_pair[i]) != -1; ++i) { - union_with(cn_landmarks, hm_table[pm_proposition].landmarks); - insert_into(cn_landmarks, pm_proposition); - - if (use_orders) { - insert_into(cn_necessary, pm_proposition); - } - } - - // go to the beginning of the effects section - ++i; - - for (; i < pc_eff_pair.size(); ++i) { - pm_proposition = pc_eff_pair[i]; - if (hm_table[pm_proposition].level != -1) { - prev_size = hm_table[pm_proposition].landmarks.size(); - intersect_with(hm_table[pm_proposition].landmarks, cn_landmarks); - - // if the add effect appears in cn_landmarks, - // fact is being achieved for >1st time - // no need to intersect for gn orderings - // or add op to first achievers - if (!contains(cn_landmarks, pm_proposition)) { - insert_into(hm_table[pm_proposition].first_achievers, op_id); - if (use_orders) { - intersect_with(hm_table[pm_proposition].necessary, cn_necessary); - } - } - - if (hm_table[pm_proposition].landmarks.size() != prev_size) - propagate_pm_propositions(pm_proposition, false, next_trigger); - } else { 
- hm_table[pm_proposition].level = level; - hm_table[pm_proposition].landmarks = cn_landmarks; - if (use_orders) { - hm_table[pm_proposition].necessary = cn_necessary; - } - insert_into(hm_table[pm_proposition].first_achievers, op_id); - propagate_pm_propositions(pm_proposition, true, next_trigger); - } - } + collect_condition_landmarks(effect_condition, cn_landmarks, cn_necessary); + update_effect_landmarks( + op_id, effect, level, cn_landmarks, cn_necessary, next_trigger); } void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index bd0140778e..ebcc059838 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -95,11 +95,14 @@ class LandmarkFactoryHM : public LandmarkFactory { TriggerSet mark_state_propositions_reached( const State &state, const VariablesProxy &variables); - std::pair, std::list> collect_precondition_landmarks( - const PiMOperator &op) const; + void collect_condition_landmarks( + const std::vector &condition, std::list &landmarks, + std::list &necessary) + const; void update_effect_landmarks( - const PiMOperator &op, int level, const std::list &landmarks, - const std::list &necessary, TriggerSet &triggers); + int op_id, const std::vector &effect, int level, + const std::list &landmarks, const std::list &necessary, + TriggerSet &triggers); void update_noop_landmarks( const std::unordered_set ¤t_triggers, const PiMOperator &op, int level, const std::list &landmarks, From 72d00d9d8a6cc32066e60c0310db12fe09f60b93 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 17 Mar 2025 15:50:24 +0100 Subject: [PATCH 041/108] Clarify landmark generation. 
--- src/search/landmarks/landmark_factory_h_m.cc | 149 +++++++++++-------- src/search/landmarks/landmark_factory_h_m.h | 18 ++- 2 files changed, 97 insertions(+), 70 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 4baa744816..f7ac019ce1 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -1,5 +1,7 @@ #include "landmark_factory_h_m.h" +#include + #include "exploration.h" #include "landmark.h" @@ -9,6 +11,7 @@ #include "../task_utils/task_properties.h" #include "../utils/collections.h" #include "../utils/logging.h" +#include "../utils/markup.h" #include "../utils/system.h" #include @@ -756,7 +759,7 @@ void LandmarkFactoryHM::free_unneeded_memory() { utils::release_vector_memory(num_unsatisfied_preconditions); set_indices.clear(); - landmark_node_table.clear(); + landmark_nodes.clear(); } void LandmarkFactoryHM::trigger_operator( @@ -865,7 +868,7 @@ void LandmarkFactoryHM::update_effect_landmarks( if (!contains(landmarks, proposition)) { insert_into(hm_table[proposition].first_achievers, op_id); if (use_orders) { - intersect_with(hm_table[proposition].necessary, necessary); + intersect_with(hm_table[proposition].prerequisite_landmark, necessary); } } @@ -876,7 +879,7 @@ void LandmarkFactoryHM::update_effect_landmarks( hm_table[proposition].level = level; hm_table[proposition].landmarks = landmarks; if (use_orders) { - hm_table[proposition].necessary = necessary; + hm_table[proposition].prerequisite_landmark = necessary; } insert_into(hm_table[proposition].first_achievers, op_id); propagate_pm_propositions(proposition, true, triggers); @@ -958,91 +961,98 @@ void LandmarkFactoryHM::compute_noop_landmarks( } void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { - if (landmark_node_table.find(set_index) == landmark_node_table.end()) { + if (!landmark_nodes.contains(set_index)) { const HMEntry &hm_entry = 
hm_table[set_index]; vector facts(hm_entry.propositions); utils::sort_unique(facts); assert(!facts.empty()); - Landmark landmark(facts, false, (facts.size() > 1), goal); - landmark.first_achievers.insert( - hm_entry.first_achievers.begin(), - hm_entry.first_achievers.end()); - landmark_node_table[set_index] = &landmark_graph->add_landmark(move(landmark)); + bool conjunctive = facts.size() > 1; + Landmark landmark(move(facts), false, conjunctive, goal); + landmark.first_achievers.insert(hm_entry.first_achievers.begin(), + hm_entry.first_achievers.end()); + landmark_nodes[set_index] = + &landmark_graph->add_landmark(move(landmark)); } } -void LandmarkFactoryHM::generate_landmarks( - const shared_ptr &task) { - TaskProxy task_proxy(*task); - initialize(task_proxy); - compute_hm_landmarks(task_proxy); - // now construct landmarks graph - Propositions goals = task_properties::get_fact_pairs(task_proxy.get_goals()); - VariablesProxy variables = task_proxy.get_variables(); - vector goal_subsets = get_m_sets(variables, goals); - list all_landmarks; - for (const Propositions &goal_subset : goal_subsets) { - assert(set_indices.find(goal_subset) != set_indices.end()); - +list LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_graph( + const VariablesProxy &variables, const Propositions &goals) { + list landmarks; + for (const Propositions &goal_subset : get_m_sets(variables, goals)) { + assert(set_indices.contains(goal_subset)); int set_index = set_indices[goal_subset]; if (hm_table[set_index].level == -1) { if (log.is_at_least_verbose()) { - log << endl << endl << "Subset of goal not reachable !!." 
<< endl << endl << endl; + log << "\n\nSubset of goal not reachable !!.\n\n\n"; log << "Subset is: "; - print_proposition_set(variables, hm_table[set_index].propositions); + print_proposition_set( + variables, hm_table[set_index].propositions); log << endl; } } - // set up goals landmarks for processing - union_with(all_landmarks, hm_table[set_index].landmarks); - - // the goal itself is also a landmark - insert_into(all_landmarks, set_index); - - // make a node for the goal, with in_goal = true; + union_with(landmarks, hm_table[set_index].landmarks); + // The goal itself is also a landmark. + insert_into(landmarks, set_index); add_landmark_node(set_index, true); } - // now make remaining landmark nodes - for (int landmark : all_landmarks) { + for (int landmark : landmarks) { add_landmark_node(landmark, false); } - if (use_orders) { - // do reduction of graph - // if f2 is landmark for f1, subtract landmark set of f2 from that of f1 - for (int f1 : all_landmarks) { - list everything_to_remove; - for (int f2 : hm_table[f1].landmarks) { - union_with(everything_to_remove, hm_table[f2].landmarks); - } - set_minus(hm_table[f1].landmarks, everything_to_remove); - // remove necessaries here, otherwise they will be overwritten - // since we are writing them as greedy nec. orderings. - if (use_orders) - set_minus(hm_table[f1].landmarks, hm_table[f1].necessary); - } - - // add the orderings. 
+ return landmarks; +} - for (int set_index : all_landmarks) { - for (int landmark : hm_table[set_index].landmarks) { - assert(landmark_node_table.find(landmark) != landmark_node_table.end()); - assert(landmark_node_table.find(set_index) != landmark_node_table.end()); +void LandmarkFactoryHM::reduce_landmarks(const list &landmarks) { + assert(use_orders); + for (int landmark1 : landmarks) { + list extended_prerequisites = + hm_table[landmark1].prerequisite_landmark; + for (int landmark2 : hm_table[landmark1].landmarks) { + union_with(extended_prerequisites, hm_table[landmark2].landmarks); + } + set_minus(hm_table[landmark1].landmarks, extended_prerequisites); + } +} - add_ordering_or_replace_if_stronger( - *landmark_node_table[landmark], - *landmark_node_table[set_index], OrderingType::NATURAL); - } - for (int gn : hm_table[set_index].necessary) { - add_ordering_or_replace_if_stronger( - *landmark_node_table[gn], *landmark_node_table[set_index], - OrderingType::GREEDY_NECESSARY); - } +void LandmarkFactoryHM::add_landmark_orderings(const list &landmarks) { + for (int to : landmarks) { + assert(landmark_nodes.contains(to)); + for (int from : hm_table[to].prerequisite_landmark) { + assert(landmark_nodes.contains(from)); + add_ordering_or_replace_if_stronger( + *landmark_nodes[from], *landmark_nodes[to], + OrderingType::GREEDY_NECESSARY); + } + for (int from : hm_table[to].landmarks) { + assert(landmark_nodes.contains(from)); + add_ordering_or_replace_if_stronger( + *landmark_nodes[from], *landmark_nodes[to], + OrderingType::NATURAL); } } - free_unneeded_memory(); +} +void LandmarkFactoryHM::construct_landmark_graph( + const TaskProxy &task_proxy) { + Propositions goals = + task_properties::get_fact_pairs(task_proxy.get_goals()); + VariablesProxy variables = task_proxy.get_variables(); + list landmarks = + collect_and_add_landmarks_to_landmark_graph(variables, goals); + if (use_orders) { + reduce_landmarks(landmarks); + add_landmark_orderings(landmarks); + } +} + +void 
LandmarkFactoryHM::generate_landmarks( + const shared_ptr &task) { + TaskProxy task_proxy(*task); + initialize(task_proxy); + compute_hm_landmarks(task_proxy); + construct_landmark_graph(task_proxy); + free_unneeded_memory(); postprocess(task_proxy); } @@ -1057,8 +1067,17 @@ class LandmarkFactoryHMFeature // document_group(""); document_title("h^m Landmarks"); document_synopsis( - "The landmark generation method introduced by " - "Keyder, Richter & Helmert (ECAI 2010)."); + "The landmark generation method introduced in the " + "following paper" + + utils::format_conference_reference( + {"Emil Keyder", "Silvia Richter", "Malte Helmert"}, + "Sound and Complete Landmarks for And/Or Graphs", + "https://ai.dmi.unibas.ch/papers/keyder-et-al-ecai2010.pdf", + "Proceedings of the 19th European Conference on Artificial " + "Intelligence (ECAI 2010)", + "335-340", + "IOS Press", + "2010")); add_option( "m", "subset size (if unsure, use the default of 2)", "2"); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index ebcc059838..b22ed7cfb0 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -46,9 +46,12 @@ struct HMEntry { // TODO: Can we replace the `list` data type? std::list landmarks; - // TODO: What does the following comment mean? What is a "greedy necessary landmark"? - // Greedy necessary landmarks, disjoint from landmarks - std::list necessary; + /* + Landmarks that are "preconditions" to achieve this `HMEntry`. This + set is disjoint from `landmarks` above and used to derive + greedy-necessary orderings. 
+ */ + std::list prerequisite_landmark; std::list first_achievers; @@ -75,7 +78,7 @@ class LandmarkFactoryHM : public LandmarkFactory { const bool conjunctive_landmarks; const bool use_orders; - std::unordered_map landmark_node_table; + std::unordered_map landmark_nodes; std::vector hm_table; std::vector pm_operators; @@ -90,6 +93,11 @@ class LandmarkFactoryHM : public LandmarkFactory { // TODO: Instead reserve the first entry of the vector for the operator itself. std::vector>> num_unsatisfied_preconditions; + std::list collect_and_add_landmarks_to_landmark_graph( + const VariablesProxy &variables, const Propositions &propositions); + void reduce_landmarks(const std::list &landmarks); + void add_landmark_orderings(const std::list &landmarks); + void construct_landmark_graph(const TaskProxy &task_proxy); virtual void generate_landmarks( const std::shared_ptr &task) override; @@ -107,11 +115,11 @@ class LandmarkFactoryHM : public LandmarkFactory { const std::unordered_set ¤t_triggers, const PiMOperator &op, int level, const std::list &landmarks, const std::list &necessary, TriggerSet &next_triggers); - void compute_hm_landmarks(const TaskProxy &task_proxy); void compute_noop_landmarks( int op_id, int noop_index, const std::list &local_landmarks, const std::list &local_necessary, int level, TriggerSet &next_trigger); + void compute_hm_landmarks(const TaskProxy &task_proxy); void trigger_operator( int op_id, bool newly_discovered, TriggerSet &trigger); From cc65d79a81cdc9a69523e5c22c55e06b04893eb8 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 17 Mar 2025 17:15:05 +0100 Subject: [PATCH 042/108] Deal with most TODOs. 
--- src/search/landmarks/landmark_factory_h_m.cc | 37 ++++++++++++-------- src/search/landmarks/landmark_factory_h_m.h | 5 ++- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f7ac019ce1..94591502b8 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -21,10 +21,9 @@ using namespace std; using utils::ExitCode; namespace landmarks { -// TODO: Can we turn these set operations into static functions? // alist = alist \cup other template -void union_with(list &alist, const list &other) { +static void union_with(list &alist, const list &other) { auto it1 = alist.begin(); auto it2 = other.begin(); @@ -44,7 +43,7 @@ void union_with(list &alist, const list &other) { // alist = alist \cap other template -void intersect_with(list &alist, const list &other) { +static void intersect_with(list &alist, const list &other) { auto it1 = alist.begin(); auto it2 = other.begin(); @@ -66,7 +65,7 @@ void intersect_with(list &alist, const list &other) { // alist = alist \setminus other template -void set_minus(list &alist, const list &other) { +static void set_minus(list &alist, const list &other) { auto it1 = alist.begin(); auto it2 = other.begin(); @@ -87,7 +86,7 @@ void set_minus(list &alist, const list &other) { // alist = alist \cup {val} template -void insert_into(list &alist, const T &val) { +static void insert_into(list &alist, const T &val) { auto it1 = alist.begin(); while (it1 != alist.end()) { @@ -273,16 +272,29 @@ vector LandmarkFactoryHM::get_m_sets( return subsets; } +#ifndef NDEBUG +static bool proposition_variables_disjoint(const Propositions &set1, + const Propositions &set2) { + for (auto [var1, val1] : set1) { + for (auto [var2, val2] : set2) { + if (var1 == var2) { + return false; + } + } + } + return true; +} +#endif + /* Get subsets of size <= m such that at least one element from `superset1` and 
at least one element from `superset2` are included, except if a set is empty.
-  We assume `superset1` and `superset2` are disjoint.
-  TODO: Assert that supersets are disjoint. Should the variables occurring in
-  the sets be disjoint, rather than their propositions?
+  We assume the variables in `superset1` and `superset2` are disjoint.
 */
 vector<Propositions> LandmarkFactoryHM::get_split_m_sets(
     const VariablesProxy &variables, const Propositions &superset1,
     const Propositions &superset2) {
+    assert(proposition_variables_disjoint(superset1, superset2));
     Propositions c;
     vector<Propositions> subsets;
     // If a set is empty, we do not have to include from it. TODO: Why not?
@@ -709,10 +721,7 @@ static bool operator_can_achieve_landmark(
         auto mutex = [&](const FactPair &other) {
                 return are_mutex(variables, atom, other);
             };
-        if (any_of(postcondition.begin(), postcondition.end(), mutex) ||
-            /* TODO: Since the precondition is factored into the postcondition,
-               I don't think we actually need this second `any_of` case. */
-            any_of(precondition.begin(), precondition.end(), mutex)) {
+        if (any_of(postcondition.begin(), postcondition.end(), mutex)) {
             return false;
         }
     }
@@ -822,8 +831,8 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached
             propagate_pm_propositions(index, true, triggers);
         }
 
-    /* TODO: This should be dealt with already due to the
-       `propagate_pm_propositions` above, isn't it? */
+    /* This is necessary to trigger operators without preconditions which are
+       not dealt with in the `propagate_pm_propositions` above.
*/ for (int i = 0; i < static_cast(pm_operators.size()); ++i) { if (num_unsatisfied_preconditions[i].first == 0) { /* diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index b22ed7cfb0..d4d3e46bd2 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -44,7 +44,7 @@ struct HMEntry { // Level 0: present in initial state int level; - // TODO: Can we replace the `list` data type? + // TODO: Can we replace the `list` data type with `set` or even `vector`? std::list landmarks; /* Landmarks that are "preconditions" to achieve this `HMEntry`. This @@ -82,7 +82,7 @@ class LandmarkFactoryHM : public LandmarkFactory { std::vector hm_table; std::vector pm_operators; - // Maps each set of < m propositions to an int. TODO: What does this int indicate? + // Maps each set of < m propositions to an int. PropositionSetToIntMap set_indices; /* The number in the first position represents the amount of unsatisfied @@ -90,7 +90,6 @@ class LandmarkFactoryHM : public LandmarkFactory { position represents the amount of unsatisfied preconditions for each conditional noop operator. */ - // TODO: Instead reserve the first entry of the vector for the operator itself. std::vector>> num_unsatisfied_preconditions; std::list collect_and_add_landmarks_to_landmark_graph( From a6e7f7c058fa32ec2f8a0cbd116f27172985f886 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 17 Mar 2025 19:00:07 +0100 Subject: [PATCH 043/108] Clean up lm_merged. 
--- .../landmarks/landmark_factory_merged.cc | 107 +++++++++++------- .../landmarks/landmark_factory_merged.h | 11 +- .../landmarks/landmark_factory_rpg_sasp.cc | 4 +- src/search/landmarks/landmark_graph.cc | 19 ++-- src/search/landmarks/landmark_graph.h | 2 +- src/search/utils/component_errors.h | 6 +- 6 files changed, 91 insertions(+), 58 deletions(-) diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index a38cd84ad8..14023e5f1a 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -24,33 +24,30 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { - if (!landmark.is_disjunctive && !landmark.is_conjunctive) { - const FactPair &atom = landmark.atoms[0]; - if (landmark_graph->contains_simple_landmark(atom)) - return &landmark_graph->get_simple_landmark_node(atom); - else - return nullptr; - } else if (landmark.is_disjunctive) { - set atoms( - landmark.atoms.begin(), landmark.atoms.end()); - if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) - return &landmark_graph->get_disjunctive_landmark_node(landmark.atoms[0]); - else - return nullptr; - } else if (landmark.is_conjunctive) { - cerr << "Don't know how to handle conjunctive landmarks yet" << endl; + if (landmark.is_disjunctive) { + if (landmark_graph->contains_identical_disjunctive_landmark( + landmark.atoms)) { + return &landmark_graph->get_disjunctive_landmark_node( + landmark.atoms[0]); + } + return nullptr; + } + + if (landmark.is_conjunctive) { + cerr << "Don't know how to handle conjunctive landmarks yet..." 
<< endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } + + assert(landmark.atoms.size() == 1); + const FactPair &atom = landmark.atoms[0]; + if (landmark_graph->contains_simple_landmark(atom)) { + return &landmark_graph->get_simple_landmark_node(atom); + } return nullptr; } -void LandmarkFactoryMerged::generate_landmarks( +vector> LandmarkFactoryMerged::generate_landmark_graphs_of_subfactories( const shared_ptr &task) { - if (log.is_at_least_normal()) { - log << "Merging " << landmark_factories.size() - << " landmark graphs" << endl; - } - vector> landmark_graphs; landmark_graphs.reserve(landmark_factories.size()); achievers_calculated = true; @@ -59,26 +56,36 @@ void LandmarkFactoryMerged::generate_landmarks( landmark_factory->compute_landmark_graph(task)); achievers_calculated &= landmark_factory->achievers_are_calculated(); } + return landmark_graphs; +} +void LandmarkFactoryMerged::add_simple_landmarks( + const vector> &landmark_graphs) const { if (log.is_at_least_normal()) { log << "Adding simple landmarks" << endl; } - for (size_t i = 0; i < landmark_graphs.size(); ++i) { + for (auto &landmark_graph : landmark_graphs) { // TODO: loop over landmarks instead - for (const auto &node : *landmark_graphs[i]) { + for (const auto &node : *landmark_graph) { const Landmark &landmark = node->get_landmark(); if (landmark.is_conjunctive) { - cerr << "Don't know how to handle conjunctive landmarks yet" << endl; + cerr << "Don't know how to handle conjunctive landmarks yet" + << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); - } else if (landmark.is_disjunctive) { + } + if (landmark.is_disjunctive) { continue; - } else if (!landmark_graph->contains_landmark(landmark.atoms[0])) { + } + if (!landmark_graph->contains_landmark(landmark.atoms[0])) { Landmark copy(landmark); landmark_graph->add_landmark(move(copy)); } } } +} +void LandmarkFactoryMerged::add_disjunctive_landmarks( + const vector> &landmark_graphs) const { if (log.is_at_least_normal()) { log << "Adding 
disjunctive landmarks" << endl; } @@ -106,34 +113,46 @@ void LandmarkFactoryMerged::generate_landmarks( } } } +} +void LandmarkFactoryMerged::add_landmark_orderings( + const vector> &landmark_graphs) const { if (log.is_at_least_normal()) { log << "Adding orderings" << endl; } - for (size_t i = 0; i < landmark_graphs.size(); ++i) { - for (const auto &from_orig : *landmark_graphs[i]) { - LandmarkNode *from = get_matching_landmark(from_orig->get_landmark()); - if (from) { - for (const auto &to : from_orig->children) { - const LandmarkNode *to_orig = to.first; - OrderingType type = to.second; - LandmarkNode *to_node = get_matching_landmark(to_orig->get_landmark()); - if (to_node) { + for (const auto &landmark_graph : landmark_graphs) { + for (const auto &from_old : *landmark_graph) { + LandmarkNode *from_new = + get_matching_landmark(from_old->get_landmark()); + if (from_new) { + for (const auto &[to_old, type] : from_old->children) { + LandmarkNode *to_new = + get_matching_landmark(to_old->get_landmark()); + if (to_new) { add_ordering_or_replace_if_stronger( - *from, *to_node, type); - } else { - if (log.is_at_least_normal()) { - log << "Discarded to ordering" << endl; - } + *from_new, *to_new, type); + } else if (log.is_at_least_normal()) { + log << "Discarded to ordering" << endl; } } - } else { - if (log.is_at_least_normal()) { - log << "Discarded from ordering" << endl; - } + } else if (log.is_at_least_normal()) { + log << "Discarded from ordering" << endl; } } } +} + +void LandmarkFactoryMerged::generate_landmarks( + const shared_ptr &task) { + if (log.is_at_least_normal()) { + log << "Merging " << landmark_factories.size() + << " landmark graphs" << endl; + } + vector> landmark_graphs = + generate_landmark_graphs_of_subfactories(task); + add_simple_landmarks(landmark_graphs); + add_disjunctive_landmarks(landmark_graphs); + add_landmark_orderings(landmark_graphs); postprocess(); } diff --git a/src/search/landmarks/landmark_factory_merged.h 
b/src/search/landmarks/landmark_factory_merged.h index e40df1146c..0ce0fdfed2 100644 --- a/src/search/landmarks/landmark_factory_merged.h +++ b/src/search/landmarks/landmark_factory_merged.h @@ -9,7 +9,16 @@ namespace landmarks { class LandmarkFactoryMerged : public LandmarkFactory { std::vector> landmark_factories; - virtual void generate_landmarks(const std::shared_ptr &task) override; + std::vector> generate_landmark_graphs_of_subfactories( + const std::shared_ptr &task); + void add_simple_landmarks( + const std::vector> &landmark_graphs) const; + void add_disjunctive_landmarks( + const std::vector> &landmark_graphs) const; + void add_landmark_orderings( + const std::vector> &landmark_graphs) const; + virtual void generate_landmarks( + const std::shared_ptr &task) override; void postprocess(); LandmarkNode *get_matching_landmark(const Landmark &landmark) const; public: diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 247e6a5db8..cc518f84b8 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -239,7 +239,9 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( // Note: don't add orders as we can't be sure that they're correct return; } else if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { - if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { + vector atoms_vector(atoms.begin(), atoms.end()); + if (landmark_graph->contains_identical_disjunctive_landmark( + atoms_vector)) { new_landmark_node = &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index 521382517f..d68b431e20 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -60,21 +60,22 @@ bool 
LandmarkGraph::contains_overlapping_disjunctive_landmark( } bool LandmarkGraph::contains_identical_disjunctive_landmark( - const set &atoms) const { - // TODO: What's going on here??? + const vector &atoms) const { + assert(!atoms.empty()); const LandmarkNode *node = nullptr; for (const FactPair &atom : atoms) { auto it = disjunctive_landmarks_to_nodes.find(atom); - if (it == disjunctive_landmarks_to_nodes.end()) + if (it == disjunctive_landmarks_to_nodes.end()) { + return false; + } + if (!node) { + node = it->second; + } else if (node != it->second) { return false; - else { - if (node && node != it->second) { - return false; - } else if (!node) - node = it->second; } } - return true; + assert(node); + return atoms.size() == node->get_landmark().atoms.size(); } bool LandmarkGraph::contains_landmark(const FactPair &atom) const { diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index 72ac95ce5f..c37a7bee91 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -141,7 +141,7 @@ class LandmarkGraph { /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ bool contains_identical_disjunctive_landmark( - const std::set &atoms) const; + const std::vector &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. 
*/ diff --git a/src/search/utils/component_errors.h b/src/search/utils/component_errors.h index dba8c77342..cdc0e8c3d2 100644 --- a/src/search/utils/component_errors.h +++ b/src/search/utils/component_errors.h @@ -15,9 +15,11 @@ class ComponentArgumentError : public Exception { void verify_argument(bool b, const std::string &message); template -void verify_list_not_empty(const std::vector list, const std::string &name) { +void verify_list_not_empty( + const std::vector &list, const std::string &name) { if (list.empty()) { - throw ComponentArgumentError("List argument '" + name + "' has to be non-empty."); + throw ComponentArgumentError( + "List argument '" + name + "' has to be non-empty."); } } } From 4eaf0163feb5f43972bc5565ef81fe0dd9b2b65a Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 17 Mar 2025 21:15:09 +0100 Subject: [PATCH 044/108] Clarify approximation of reasonable orderings. --- .../landmark_factory_reasonable_orders_hps.cc | 144 +++++++++++------- .../landmark_factory_reasonable_orders_hps.h | 13 +- 2 files changed, 96 insertions(+), 61 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 89ede1c82c..54379f632e 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -35,75 +35,101 @@ void LandmarkFactoryReasonableOrdersHPS::generate_landmarks( if (log.is_at_least_normal()) { log << "approx. reasonable orders" << endl; } - approximate_reasonable_orders(task_proxy); + approximate_reasonable_orderings(task_proxy); } -void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orders( - const TaskProxy &task_proxy) { - /* - Approximate reasonable orderings according to Hoffmann et al. (JAIR 2004). - - If node_p is in goal, then any node2_p which interferes with - node_p can be reasonably ordered before node_p. 
Otherwise, if - node_p is greedy necessary predecessor of node2, and there is - another predecessor "parent" of node2, then parent and all - predecessors of parent can be ordered reasonably before node_p if - they interfere with node_p. - */ - State initial_state = task_proxy.get_initial_state(); - int variables_size = task_proxy.get_variables().size(); - for (const auto &node_p : *landmark_graph) { - const Landmark &landmark = node_p->get_landmark(); - if (landmark.is_disjunctive) +void LandmarkFactoryReasonableOrdersHPS::approximate_goal_orderings( + const TaskProxy &task_proxy, LandmarkNode &node) const { + const Landmark &landmark = node.get_landmark(); + assert(landmark.is_true_in_goal); + for (const auto &other : *landmark_graph) { + const Landmark &other_landmark = other->get_landmark(); + if (landmark == other_landmark || other_landmark.is_disjunctive) { continue; + } + if (interferes(task_proxy, other_landmark, landmark)) { + add_ordering_or_replace_if_stronger( + *other, node, OrderingType::REASONABLE); + } + } +} - if (landmark.is_true_in_goal) { - for (const auto &node2_p : *landmark_graph) { - const Landmark &landmark2 = node2_p->get_landmark(); - if (landmark == landmark2 || landmark2.is_disjunctive) +unordered_set LandmarkFactoryReasonableOrdersHPS::collect_reasonable_ordering_candidates( + const LandmarkNode &node) { + unordered_set interesting_nodes; + for (const auto &[child, type] : node.children) { + if (type >= OrderingType::GREEDY_NECESSARY) { + // Found a landmark such that `node` ->_gn `child`. + for (const auto &[parent, parent_type]: child->parents) { + if (parent->get_landmark().is_disjunctive) { continue; - if (interferes(task_proxy, landmark2, landmark)) { - add_ordering_or_replace_if_stronger( - *node2_p, *node_p, OrderingType::REASONABLE); } - } - } else { - /* Collect candidates for reasonable orders in "interesting nodes". - Use hash set to filter duplicates. 
*/ - unordered_set interesting_nodes(variables_size); - for (const auto &child : node_p->children) { - const LandmarkNode &node2_p = *child.first; - const OrderingType &type2 = child.second; - if (type2 >= OrderingType::GREEDY_NECESSARY) { - // Found node2_p: node_p ->_gn node2_p. - for (const auto &p : node2_p.parents) { - LandmarkNode &parent_node = *(p.first); - const OrderingType &type = p.second; - if (parent_node.get_landmark().is_disjunctive) - continue; - if (type >= OrderingType::NATURAL && - &parent_node != node_p.get()) { - /* Find predecessors or parent and collect in - "interesting nodes". */ - interesting_nodes.insert(&parent_node); - collect_ancestors(interesting_nodes, parent_node); - } - } - } - } - /* Insert reasonable orders between those members of - "interesting nodes" that interfere with node_p. */ - for (LandmarkNode *node2_p : interesting_nodes) { - const Landmark &landmark2 = node2_p->get_landmark(); - if (landmark == landmark2 || landmark2.is_disjunctive) - continue; - if (interferes(task_proxy, landmark2, landmark)) { - add_ordering_or_replace_if_stronger( - *node2_p, *node_p, OrderingType::REASONABLE); + if (parent_type >= OrderingType::NATURAL && *parent != node) { + /* Find predecessors or parent and collect in + `interesting nodes`. */ + interesting_nodes.insert(parent); + collect_ancestors(interesting_nodes, *parent); } } } } + return interesting_nodes; +} + +/* Insert reasonable orderings between the `candidates` that interfere + with `landmark`. 
*/ +void LandmarkFactoryReasonableOrdersHPS::insert_reasonable_orderings( + const TaskProxy &task_proxy, + const unordered_set &candidates, + LandmarkNode &node, const Landmark &landmark) const { + for (LandmarkNode *other : candidates) { + const Landmark &other_landmark = other->get_landmark(); + if (landmark == other_landmark || other_landmark.is_disjunctive) { + continue; + } + if (interferes(task_proxy, other_landmark, landmark)) { + /* + TODO: If `other_landmark` interferes with `landmark`, then by + transitivity we know all natural predecessors of `other_landmark` + are also reasonably ordered before `landmark`, but here we only + add the one reasonable ordering. Maybe it's not worth adding the + others as well (transitivity), but it could be interesting to + test the effect of doing so, for example for the cycle heuristic. + */ + add_ordering_or_replace_if_stronger( + *other, node, OrderingType::REASONABLE); + } + } +} + +/* + Approximate reasonable orderings according to Hoffmann et al. (JAIR 2004): + + If `landmark` holds in the goal, any other landmark which interferes + with it is reasonably ordered before it. Otherwise, if `landmark` is a + greedy-necessary predecessor of another landmark, and there is + another predecessor `parent` of that other landmark (`candidates`), + then `parent` and all predecessors of `parent` are ordered reasonably + before `landmark` if they interfere with it. 
+*/ +void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orderings( + const TaskProxy &task_proxy) { + State initial_state = task_proxy.get_initial_state(); + for (const auto &node : *landmark_graph) { + const Landmark &landmark = node->get_landmark(); + if (landmark.is_disjunctive) { + continue; + } + + if (landmark.is_true_in_goal) { + approximate_goal_orderings(task_proxy, *node); + } else { + unordered_set candidates = + collect_reasonable_ordering_candidates(*node); + insert_reasonable_orderings( + task_proxy, candidates, *node, landmark); + } + } } bool LandmarkFactoryReasonableOrdersHPS::interferes( diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 3effeb4acd..4841b5ec46 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -7,9 +7,18 @@ namespace landmarks { class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { std::shared_ptr landmark_factory; - virtual void generate_landmarks(const std::shared_ptr &task) override; + virtual void generate_landmarks( + const std::shared_ptr &task) override; - void approximate_reasonable_orders(const TaskProxy &task_proxy); + void approximate_goal_orderings( + const TaskProxy &task_proxy, LandmarkNode &node) const; + std::unordered_set collect_reasonable_ordering_candidates( + const LandmarkNode &node); + void insert_reasonable_orderings( + const TaskProxy &task_proxy, + const std::unordered_set &candidates, + LandmarkNode &node, const Landmark &landmark) const; + void approximate_reasonable_orderings(const TaskProxy &task_proxy); bool interferes( const TaskProxy &task_proxy, const Landmark &landmark_a, const Landmark &landmark_b) const; From bec3ad7b5efb0c28eb2cf6765ba074f530e15f34 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 08:47:51 +0100 Subject: [PATCH 045/108] Fix performance drop in 
landmark generation time. --- src/search/landmarks/util.cc | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 7040921f13..6b6b2c4017 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -11,11 +11,12 @@ using namespace std; namespace landmarks { static bool condition_is_reachable( const ConditionsProxy &conditions, const vector> &reached) { - return all_of(begin(conditions), end(conditions), - [reached](const FactProxy &condition) { - auto [var, value] = condition.get_pair(); - return reached[var][value]; - }); + for (FactProxy condition : conditions) { + if (!reached[condition.get_variable().get_id()][condition.get_value()]) { + return false; + } + } + return true; } /* Check whether operator `op` can possibly make `landmark` true in a From e59d9425d4dc6b4075b7766bb6c1ed2125cd6aca Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 10:54:11 +0100 Subject: [PATCH 046/108] TODO --- .../landmark_factory_reasonable_orders_hps.cc | 327 ++++++++++-------- .../landmark_factory_reasonable_orders_hps.h | 10 +- 2 files changed, 187 insertions(+), 150 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 54379f632e..0be1cc345c 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -132,154 +132,10 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orderings( } } -bool LandmarkFactoryReasonableOrdersHPS::interferes( - const TaskProxy &task_proxy, const Landmark &landmark_a, - const Landmark &landmark_b) const { - /* Facts a and b interfere (i.e., achieving b before a would mean having to delete b - and re-achieve it in order to achieve a) if one of the following condition holds: - 1. a and b are mutex - 2. 
All actions that add a also add e, and e and b are mutex - 3. There is a greedy necessary predecessor x of a, and x and b are mutex - This is the definition of Hoffmann et al. except that they have one more condition: - "all actions that add a delete b". However, in our case (SAS+ formalism), this condition - is the same as 2. - */ - assert(landmark_a != landmark_b); - assert(!landmark_a.is_disjunctive && !landmark_b.is_disjunctive); - - VariablesProxy variables = task_proxy.get_variables(); - for (const FactPair &atom_b : landmark_b.atoms) { - FactProxy fact_b = variables[atom_b.var].get_fact(atom_b.value); - for (const FactPair &atom_a : landmark_a.atoms) { - FactProxy fact_a = variables[atom_a.var].get_fact(atom_a.value); - if (atom_a == atom_b) { - if (!landmark_a.is_conjunctive || !landmark_b.is_conjunctive) - return false; - else - continue; - } - - // 1. a, b mutex - if (fact_a.is_mutex(fact_b)) - return true; - - // 2. Shared effect e in all operators reaching a, and e, b are mutex - // Skip this for conjunctive nodes a, as they are typically achieved through a - // sequence of operators successively adding the parts of a - if (landmark_a.is_conjunctive) - continue; - - unordered_map shared_eff; - bool init = true; - const vector &op_or_axiom_ids = - get_operators_including_effect(atom_a); - // Intersect operators that achieve a one by one - for (int op_or_axiom_id : op_or_axiom_ids) { - // If no shared effect among previous operators, break - if (!init && shared_eff.empty()) - break; - // Else, insert effects of this operator into set "next_eff" if - // it is an unconditional effect or a conditional effect that is sure to - // happen. (Such "trivial" conditions can arise due to our translator, - // e.g. in Schedule. There, the same effect is conditioned on a disjunction - // of conditions of which one will always be true. We test for a simple kind - // of these trivial conditions here.) 
- EffectsProxy effects = - get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); - set trivially_conditioned_effects; - bool trivial_conditioned_effects_found = - effect_always_happens(variables, effects, - trivially_conditioned_effects); - unordered_map next_eff; - for (EffectProxy effect : effects) { - FactPair effect_fact = effect.get_fact().get_pair(); - if (effect.get_conditions().empty() && - effect_fact.var != atom_a.var) { - next_eff.emplace(effect_fact.var, effect_fact.value); - } else if (trivial_conditioned_effects_found && - trivially_conditioned_effects.find(effect_fact) - != trivially_conditioned_effects.end()) - next_eff.emplace(effect_fact.var, effect_fact.value); - } - // Intersect effects of this operator with those of previous operators - if (init) - swap(shared_eff, next_eff); - else { - unordered_map result; - for (const auto &eff1 : shared_eff) { - auto it2 = next_eff.find(eff1.first); - if (it2 != next_eff.end() && it2->second == eff1.second) - result.insert(eff1); - } - swap(shared_eff, result); - } - init = false; - } - // Test whether one of the shared effects is inconsistent with b - for (const pair &eff : shared_eff) { - const FactProxy &effect_fact = - variables[eff.first].get_fact(eff.second); - if (effect_fact != fact_a && - effect_fact != fact_b && - effect_fact.is_mutex(fact_b)) - return true; - } - } - - /* // Experimentally commenting this out -- see issue202. - // 3. 
Exists LM x, inconsistent x, b and x->_gn a - for (const auto &parent : node_a->parents) { - const LandmarkNode &node = *parent.first; - edge_type edge = parent.second; - for (const FactPair &parent_prop : node.facts) { - const FactProxy &parent_prop_fact = - variables[parent_prop.var].get_fact(parent_prop.value); - if (edge >= greedy_necessary && - parent_prop_fact != fact_b && - parent_prop_fact.is_mutex(fact_b)) - return true; - } - } - */ - } - // No inconsistency found - return false; -} - -void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( - unordered_set &result, LandmarkNode &node) { - // Returns all ancestors in the landmark graph of landmark node "start". - - // There could be cycles if use_reasonable == true - list open_nodes; - unordered_set closed_nodes; - for (const auto &p : node.parents) { - LandmarkNode &parent = *(p.first); - const OrderingType &type = p.second; - if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { - open_nodes.push_back(&parent); - closed_nodes.insert(&parent); - result.insert(&parent); - } - } - while (!open_nodes.empty()) { - LandmarkNode &node2 = *(open_nodes.front()); - for (const auto &p : node2.parents) { - LandmarkNode &parent = *(p.first); - const OrderingType &type = p.second; - if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { - open_nodes.push_back(&parent); - closed_nodes.insert(&parent); - result.insert(&parent); - } - } - open_nodes.pop_front(); - } -} - -bool LandmarkFactoryReasonableOrdersHPS::effect_always_happens( +// TODO: Refactor this monster. +static bool effect_always_happens( const VariablesProxy &variables, const EffectsProxy &effects, - set &eff) const { + set &eff) { /* Test whether the condition of a conditional effect is trivial, i.e. always true. We test for the simple case that the same effect proposition is triggered by a set of conditions of which one will always be true. This is e.g. 
the case in @@ -391,6 +247,183 @@ bool LandmarkFactoryReasonableOrdersHPS::effect_always_happens( return eff.empty(); } +/* + Insert effects of this operator into `effect` if it is an + unconditional effect or a conditional effect that is sure to happen. + (Such "trivial" conditions can arise due to our translator, e.g. in + Schedule. There, the same effect is conditioned on a disjunction of + conditions of which one will always be true. We test for a simple kind + of these trivial conditions here.) +*/ +static utils::HashSet get_effects_on_other_variables( + const TaskProxy &task_proxy, int op_or_axiom_id, int var_id) { + EffectsProxy effects = + get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); + set trivially_conditioned_effects; + bool trivial_conditioned_effects_found = + effect_always_happens(task_proxy.get_variables(), effects, + trivially_conditioned_effects); + utils::HashSet next_effect; + for (const EffectProxy &effect : effects) { + FactPair atom = effect.get_fact().get_pair(); + // TODO: Why only on other variables? + if (effect.get_conditions().empty() && atom.var != var_id) { + next_effect.insert(atom); + } else if (trivial_conditioned_effects_found && + trivially_conditioned_effects.contains(atom)) { + next_effect.insert(atom); + } + } + return next_effect; +} + +utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_of_achievers( + const FactPair &atom, const TaskProxy &task_proxy) const { + utils::HashSet shared_effects; + + // Intersect effects of operators that achieve `atom` one by one. 
+ bool init = true; + for (int op_or_axiom_id : get_operators_including_effect(atom)) { + utils::HashSet effect = get_effects_on_other_variables( + task_proxy, op_or_axiom_id, atom.var); + + if (init) { + swap(shared_effects, effect); + init = false; + } else { + for (const FactPair &eff : shared_effects) { + if (!effect.contains(eff)) { + shared_effects.erase(eff); + } + } + } + + // Abort if previous operators have no shared effects. + if (shared_effects.empty()) { + assert(!init); + break; + } + } + return shared_effects; +} + +/* + An atom B interferes with another atom A if achieving A is impossible while B + holds. This can be either because B may not be true when applying any operator + that achieves A (because it cannot hold at the same time as these + preconditions) or because the effects of operators that achieve A delete B. + Specifically, B interferes with A if one of the following conditions holds: + 1. A and B are mutex. + 2. All operators that add A also add E, and E and B are mutex. + 3. There is a greedy-necessary predecessor X of A, and X and B are mutex. + This is the definition of Hoffmann et al. except that they have one more + condition: "All actions that add A delete B." However, in our case (SAS+ + formulation), this condition is the same as 2. +*/ +bool LandmarkFactoryReasonableOrdersHPS::interferes( + const VariablesProxy &variables, const Landmark &landmark_a, + const FactPair &atom_a, const FactProxy &a, const FactProxy &b) const { + // Case 1: A and B are mutex. + if (a.is_mutex(b)) { + return true; + } + + /* + Case 2: All operators reaching A have a shared effect E, and E and B are + mutex. + Skip this case for conjunctive landmarks A, as they are typically achieved + through a sequence of operators successively adding the parts of A. 
+ */ + if (landmark_a.is_conjunctive) { + return false; + } + + unordered_map shared_eff = get_shared_effects_of_achievers(atom_a); + // Test whether one of the shared effects is inconsistent with b + for (const pair &eff : shared_eff) { + const FactProxy &effect_fact = + variables[eff.first].get_fact(eff.second); + if (effect_fact != a && + effect_fact != b && + effect_fact.is_mutex(b)) + return true; + } +} + +bool LandmarkFactoryReasonableOrdersHPS::interferes( + const TaskProxy &task_proxy, const Landmark &landmark_a, + const Landmark &landmark_b) const { + assert(landmark_a != landmark_b); + assert(!landmark_a.is_disjunctive) + assert(!landmark_b.is_disjunctive); + + VariablesProxy variables = task_proxy.get_variables(); + for (const FactPair &atom_b : landmark_b.atoms) { + FactProxy b = variables[atom_b.var].get_fact(atom_b.value); + for (const FactPair &atom_a : landmark_a.atoms) { + FactProxy a = variables[atom_a.var].get_fact(atom_a.value); + if (atom_a == atom_b) { + if (landmark_a.is_conjunctive && landmark_b.is_conjunctive) { + continue; + } + return false; + } + if (interferes(atom_a, atom_b, a, b)) { + return false; + } + } + + /* // Experimentally commenting this out -- see issue202. + // 3. Exists LM x, inconsistent x, b and x->_gn a + for (const auto &parent : node_a->parents) { + const LandmarkNode &node = *parent.first; + edge_type edge = parent.second; + for (const FactPair &parent_prop : node.facts) { + const FactProxy &parent_prop_fact = + variables[parent_prop.var].get_fact(parent_prop.value); + if (edge >= greedy_necessary && + parent_prop_fact != fact_b && + parent_prop_fact.is_mutex(fact_b)) + return true; + } + } + */ + } + // No inconsistency found + return false; +} + +void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( + unordered_set &result, LandmarkNode &node) { + // Returns all ancestors in the landmark graph of landmark node "start". 
+ + // There could be cycles if use_reasonable == true + list open_nodes; + unordered_set closed_nodes; + for (const auto &p : node.parents) { + LandmarkNode &parent = *(p.first); + const OrderingType &type = p.second; + if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { + open_nodes.push_back(&parent); + closed_nodes.insert(&parent); + result.insert(&parent); + } + } + while (!open_nodes.empty()) { + LandmarkNode &node2 = *(open_nodes.front()); + for (const auto &p : node2.parents) { + LandmarkNode &parent = *(p.first); + const OrderingType &type = p.second; + if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { + open_nodes.push_back(&parent); + closed_nodes.insert(&parent); + result.insert(&parent); + } + } + open_nodes.pop_front(); + } +} + bool LandmarkFactoryReasonableOrdersHPS::supports_conditional_effects() const { return landmark_factory->supports_conditional_effects(); } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 4841b5ec46..0c0821fb02 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -3,6 +3,8 @@ #include "landmark_factory.h" +#include + namespace landmarks { class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { std::shared_ptr landmark_factory; @@ -19,14 +21,16 @@ class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { const std::unordered_set &candidates, LandmarkNode &node, const Landmark &landmark) const; void approximate_reasonable_orderings(const TaskProxy &task_proxy); + utils::HashSet get_shared_effects_of_achievers( + const FactPair &atom, const TaskProxy &task_proxy) const; + bool interferes( + const VariablesProxy &variables, const Landmark &landmark_a, + const FactPair &atom_a, const FactProxy &a, const FactProxy &b) const; bool interferes( const TaskProxy &task_proxy, const 
Landmark &landmark_a, const Landmark &landmark_b) const; void collect_ancestors( std::unordered_set &result, LandmarkNode &node); - bool effect_always_happens( - const VariablesProxy &variables, const EffectsProxy &effects, - std::set &eff) const; public: LandmarkFactoryReasonableOrdersHPS( const std::shared_ptr &lm_factory, From 01a186e4f9638318327517c82ba33cce40eb8a1c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 11:54:52 +0100 Subject: [PATCH 047/108] Clean up interference function. --- .../landmark_factory_reasonable_orders_hps.cc | 92 ++++++++++--------- .../landmark_factory_reasonable_orders_hps.h | 5 +- 2 files changed, 50 insertions(+), 47 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 0be1cc345c..62f46b5ead 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -260,23 +260,31 @@ static utils::HashSet get_effects_on_other_variables( EffectsProxy effects = get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); set trivially_conditioned_effects; + // TODO: Strange return value. bool trivial_conditioned_effects_found = effect_always_happens(task_proxy.get_variables(), effects, trivially_conditioned_effects); utils::HashSet next_effect; for (const EffectProxy &effect : effects) { FactPair atom = effect.get_fact().get_pair(); - // TODO: Why only on other variables? 
- if (effect.get_conditions().empty() && atom.var != var_id) { - next_effect.insert(atom); - } else if (trivial_conditioned_effects_found && - trivially_conditioned_effects.contains(atom)) { + if ((effect.get_conditions().empty() && atom.var != var_id) || + (trivial_conditioned_effects_found && + trivially_conditioned_effects.contains(atom))) { next_effect.insert(atom); } } return next_effect; } +static void intersect_inplace(utils::HashSet &set, + const utils::HashSet &other) { + for (const FactPair &atom : other) { + if (!set.contains(atom)) { + set.erase(atom); + } + } +} + utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_of_achievers( const FactPair &atom, const TaskProxy &task_proxy) const { utils::HashSet shared_effects; @@ -291,14 +299,9 @@ utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_ swap(shared_effects, effect); init = false; } else { - for (const FactPair &eff : shared_effects) { - if (!effect.contains(eff)) { - shared_effects.erase(eff); - } - } + intersect_inplace(shared_effects, effect); } - // Abort if previous operators have no shared effects. if (shared_effects.empty()) { assert(!init); break; @@ -321,40 +324,56 @@ utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_ formulation), this condition is the same as 2. */ bool LandmarkFactoryReasonableOrdersHPS::interferes( - const VariablesProxy &variables, const Landmark &landmark_a, - const FactPair &atom_a, const FactProxy &a, const FactProxy &b) const { - // Case 1: A and B are mutex. + const VariablesProxy &variables, const TaskProxy &task_proxy, + const Landmark &landmark_a, const FactPair &atom_a, const FactProxy &a, + const FactProxy &b) const { + // 1. A and B are mutex. if (a.is_mutex(b)) { return true; } /* - Case 2: All operators reaching A have a shared effect E, and E and B are - mutex. + 2. All operators reaching A have a shared effect E, and E and B are mutex. 
Skip this case for conjunctive landmarks A, as they are typically achieved through a sequence of operators successively adding the parts of A. */ if (landmark_a.is_conjunctive) { return false; } + utils::HashSet shared_effect = + get_shared_effects_of_achievers(atom_a, task_proxy); + return ranges::any_of( + shared_effect.begin(), shared_effect.end(), [&](const FactPair &atom) { + const FactProxy &e = variables[atom.var].get_fact(atom.value); + return e != a && e != b && e.is_mutex(b); + }); - unordered_map shared_eff = get_shared_effects_of_achievers(atom_a); - // Test whether one of the shared effects is inconsistent with b - for (const pair &eff : shared_eff) { - const FactProxy &effect_fact = - variables[eff.first].get_fact(eff.second); - if (effect_fact != a && - effect_fact != b && - effect_fact.is_mutex(b)) - return true; + /* + Experimentally commenting this out -- see issue202. + TODO: This code became unreachable and no longer works after + all the refactorings we did recently. + // Case 3: There exists an atom X inconsistent with B such that X->_gn A. 
+ for (const auto &parent : node_a->parents) { + const LandmarkNode &node = *parent.first; + edge_type edge = parent.second; + for (const FactPair &parent_prop : node.facts) { + const FactProxy &parent_prop_fact = + variables[parent_prop.var].get_fact(parent_prop.value); + if (edge >= greedy_necessary && + parent_prop_fact != fact_b && + parent_prop_fact.is_mutex(fact_b)) { + return true; + } + } } + */ } bool LandmarkFactoryReasonableOrdersHPS::interferes( const TaskProxy &task_proxy, const Landmark &landmark_a, const Landmark &landmark_b) const { assert(landmark_a != landmark_b); - assert(!landmark_a.is_disjunctive) + assert(!landmark_a.is_disjunctive); assert(!landmark_b.is_disjunctive); VariablesProxy variables = task_proxy.get_variables(); @@ -368,28 +387,11 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( } return false; } - if (interferes(atom_a, atom_b, a, b)) { - return false; - } - } - - /* // Experimentally commenting this out -- see issue202. - // 3. Exists LM x, inconsistent x, b and x->_gn a - for (const auto &parent : node_a->parents) { - const LandmarkNode &node = *parent.first; - edge_type edge = parent.second; - for (const FactPair &parent_prop : node.facts) { - const FactProxy &parent_prop_fact = - variables[parent_prop.var].get_fact(parent_prop.value); - if (edge >= greedy_necessary && - parent_prop_fact != fact_b && - parent_prop_fact.is_mutex(fact_b)) - return true; + if (interferes(variables, task_proxy, landmark_a, atom_a, a, b)) { + return true; } } - */ } - // No inconsistency found return false; } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 0c0821fb02..7500c60a59 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -24,8 +24,9 @@ class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { utils::HashSet 
get_shared_effects_of_achievers( const FactPair &atom, const TaskProxy &task_proxy) const; bool interferes( - const VariablesProxy &variables, const Landmark &landmark_a, - const FactPair &atom_a, const FactProxy &a, const FactProxy &b) const; + const VariablesProxy &variables, const TaskProxy &task_proxy, + const Landmark &landmark_a, const FactPair &atom_a, const FactProxy &a, + const FactProxy &b) const; bool interferes( const TaskProxy &task_proxy, const Landmark &landmark_a, const Landmark &landmark_b) const; From a8dd5c053d02217fb94bf55827af8b73eda4defb Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 13:27:44 +0100 Subject: [PATCH 048/108] Revert some smaller changes to recover performance. --- src/search/cartesian_abstractions/utils_landmarks.cc | 2 +- src/search/landmarks/landmark_factory.cc | 2 -- src/search/landmarks/landmark_factory_h_m.cc | 1 + src/search/landmarks/landmark_factory_h_m.h | 1 + src/search/landmarks/landmark_factory_merged.cc | 4 ++-- .../landmarks/landmark_factory_reasonable_orders_hps.h | 2 ++ src/search/landmarks/landmark_factory_rpg_sasp.cc | 5 ++--- src/search/landmarks/landmark_factory_rpg_sasp.h | 2 +- src/search/landmarks/landmark_factory_zhu_givan.cc | 1 - src/search/landmarks/landmark_graph.cc | 2 +- src/search/landmarks/landmark_graph.h | 2 +- 11 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index fb54b616ec..dd83b6f2cb 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -5,10 +5,10 @@ #include "../landmarks/landmark_factory_h_m.h" #include "../landmarks/landmark_graph.h" #include "../utils/logging.h" -#include "../utils/memory.h" #include #include +#include using namespace std; using namespace landmarks; diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 
43d1a5d7b3..888ead7622 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -68,8 +68,6 @@ static void remove_ordering(LandmarkNode &from, LandmarkNode &to) { assert(to.parents.contains(&from)); from.children.erase(&to); to.parents.erase(&from); - assert(!to.parents.contains(&from)); - assert(!from.children.contains(&to)); } void LandmarkFactory::add_ordering( diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 827ea207af..92dd7a4adf 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -13,6 +13,7 @@ #include #include +#include using namespace std; using utils::ExitCode; diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index f3b646c088..3d70b9b31a 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -5,6 +5,7 @@ #include #include +#include namespace landmarks { using FluentSet = std::vector; diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 14023e5f1a..996a5d96b4 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -25,8 +25,8 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { if (landmark.is_disjunctive) { - if (landmark_graph->contains_identical_disjunctive_landmark( - landmark.atoms)) { + set atoms(landmark.atoms.begin(), landmark.atoms.end()); + if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { return &landmark_graph->get_disjunctive_landmark_node( landmark.atoms[0]); } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 3effeb4acd..25705bfaae 100644 --- 
a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -3,6 +3,8 @@ #include "landmark_factory.h" +#include + namespace landmarks { class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { std::shared_ptr landmark_factory; diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index cc518f84b8..e7fb873faa 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -13,6 +13,7 @@ #include #include +#include using namespace std; using utils::ExitCode; @@ -239,9 +240,7 @@ void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( // Note: don't add orders as we can't be sure that they're correct return; } else if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { - vector atoms_vector(atoms.begin(), atoms.end()); - if (landmark_graph->contains_identical_disjunctive_landmark( - atoms_vector)) { + if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { new_landmark_node = &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index fd3d408099..adaaf20fb1 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -6,8 +6,8 @@ #include "../utils/hash.h" #include +#include #include -#include #include namespace landmarks { diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 15659a14e9..6135303e4e 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -9,7 +9,6 @@ #include "../plugins/plugin.h" #include "../utils/logging.h" -#include 
#include #include diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index d68b431e20..b04bcd7f62 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -60,7 +60,7 @@ bool LandmarkGraph::contains_overlapping_disjunctive_landmark( } bool LandmarkGraph::contains_identical_disjunctive_landmark( - const vector &atoms) const { + const set &atoms) const { assert(!atoms.empty()); const LandmarkNode *node = nullptr; for (const FactPair &atom : atoms) { diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index c37a7bee91..72ac95ce5f 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -141,7 +141,7 @@ class LandmarkGraph { /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. */ bool contains_identical_disjunctive_landmark( - const std::vector &atoms) const; + const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ From d40325cbb073131ffc127ac1f6da0a8ee1cf4009 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 22:25:10 +0100 Subject: [PATCH 049/108] Refactor stuff around effects that always happen. --- .../landmark_factory_reasonable_orders_hps.cc | 193 +++++++++--------- 1 file changed, 91 insertions(+), 102 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 62f46b5ead..9d7addcb65 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -132,119 +132,112 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orderings( } } -// TODO: Refactor this monster. 
-static bool effect_always_happens( - const VariablesProxy &variables, const EffectsProxy &effects, - set &eff) { - /* Test whether the condition of a conditional effect is trivial, i.e. always true. - We test for the simple case that the same effect proposition is triggered by - a set of conditions of which one will always be true. This is e.g. the case in - Schedule, where the effect - (forall (?oldpaint - colour) - (when (painted ?x ?oldpaint) - (not (painted ?x ?oldpaint)))) - is translated by the translator to: if oldpaint == blue, then not painted ?x, and if - oldpaint == red, then not painted ?x etc. - If conditional effects are found that are always true, they are returned in "eff". - */ - // Go through all effects of operator and collect: - // - all variables that are set to some value in a conditional effect (effect_vars) - // - variables that can be set to more than one value in a cond. effect (nogood_effect_vars) - // - a mapping from cond. effect propositions to all the conditions that they appear with - set effect_vars; - set nogood_effect_vars; - map>> effect_conditions_by_variable; +struct EffectConditionSet { + int value; + utils::HashSet conditions; +}; + +static unordered_map compute_effect_conditions_by_variable( + const EffectsProxy &effects) { + // Variables that occur in multiple effects with different values. 
+ unordered_set nogood_effect_vars; + unordered_map effect_conditions_by_variable; for (EffectProxy effect : effects) { EffectConditionsProxy effect_conditions = effect.get_conditions(); - FactProxy effect_fact = effect.get_fact(); - int var_id = effect_fact.get_variable().get_id(); - int value = effect_fact.get_value(); - if (effect_conditions.empty() || - nogood_effect_vars.find(var_id) != nogood_effect_vars.end()) { - // Var has no condition or can take on different values, skipping + auto [var, value] = effect.get_fact().get_pair(); + if (effect_conditions.empty() || nogood_effect_vars.contains(var)) { continue; } - if (effect_vars.find(var_id) != effect_vars.end()) { - // We have seen this effect var before - assert(effect_conditions_by_variable.find(var_id) != effect_conditions_by_variable.end()); - int old_eff = effect_conditions_by_variable.find(var_id)->second.first; - if (old_eff != value) { - // Was different effect - nogood_effect_vars.insert(var_id); + if (effect_conditions_by_variable.contains(var)) { + // We have seen `var` in an effect before. + int old_effect_value = effect_conditions_by_variable[var].value; + if (old_effect_value != value) { + nogood_effect_vars.insert(var); continue; } - } else { - // We have not seen this effect var before - effect_vars.insert(var_id); - } - if (effect_conditions_by_variable.find(var_id) != effect_conditions_by_variable.end() - && effect_conditions_by_variable.find(var_id)->second.first == value) { - // We have seen this effect before, adding conditions + + // Add more conditions to this previously seen effect. 
for (FactProxy effect_condition : effect_conditions) { - vector &vec = effect_conditions_by_variable.find(var_id)->second.second; - vec.push_back(effect_condition.get_pair()); + effect_conditions_by_variable[var].conditions.insert( + effect_condition.get_pair()); } } else { - // We have not seen this effect before, making new effect entry - vector &vec = effect_conditions_by_variable.emplace( - var_id, make_pair( - value, vector ())).first->second.second; + // We have not seen this effect `var` before, so we add a new entry. + utils::HashSet conditions; + conditions.reserve(effect_conditions.size()); for (FactProxy effect_condition : effect_conditions) { - vec.push_back(effect_condition.get_pair()); + conditions.insert(effect_condition.get_pair()); } + effect_conditions_by_variable[var] = {value, move(conditions)}; } } + for (int var : nogood_effect_vars) { + effect_conditions_by_variable.erase(var); + } + return effect_conditions_by_variable; +} - // For all those effect propositions whose variables do not take on different values... 
- for (const auto &effect_conditions : effect_conditions_by_variable) { - if (nogood_effect_vars.find(effect_conditions.first) != nogood_effect_vars.end()) { - continue; +static unordered_map> get_conditions_by_variable( + const EffectConditionSet &effect_conditions) { + unordered_map> conditions_by_var; + for (auto [var, value] : effect_conditions.conditions) { + conditions_by_var[var].insert(value); + } + return conditions_by_var; +} + +static bool effect_always_happens( + int effect_var, const EffectConditionSet &effect_conditions, + const VariablesProxy &variables) { + unordered_map> conditions_by_var = + get_conditions_by_variable(effect_conditions); + + /* + The effect always happens if for all variables with effect conditions it + holds that (1) the effect triggers for all possible values in their domain + or (2) the variable of the condition is the variable of the effect and the + effect triggers for all other non-effect values in their domain. + */ + for (auto &[conditions_var, values] : conditions_by_var) { + size_t domain_size = variables[effect_var].get_domain_size(); + assert(values.size() <= domain_size); + if (effect_var == conditions_var) { + /* Extending the `values` with the `effect_conditions.value` + completes the domain if the effect always happens. */ + values.insert(effect_conditions.value); } - // ...go through all the conditions that the effect has, and map condition - // variables to the set of values they take on (in unique_conds) - map> unique_conds; - for (const FactPair &cond : effect_conditions.second.second) { - if (unique_conds.find(cond.var) != unique_conds.end()) { - unique_conds.find(cond.var)->second.insert( - cond.value); - } else { - set &the_set = unique_conds.emplace(cond.var, set()).first->second; - the_set.insert(cond.value); - } + if (values.size() < domain_size) { + return false; } - // Check for each condition variable whether the number of values it takes on is - // equal to the domain of that variable... 
- bool is_always_reached = true; - for (auto &unique_cond : unique_conds) { - bool is_surely_reached_by_var = false; - int num_values_for_cond = unique_cond.second.size(); - int num_values_of_variable = variables[unique_cond.first].get_domain_size(); - if (num_values_for_cond == num_values_of_variable) { - is_surely_reached_by_var = true; - } - // ...or else if the condition variable is the same as the effect variable, - // check whether the condition variable takes on all other values except the - // effect value - else if (unique_cond.first == effect_conditions.first && - num_values_for_cond == num_values_of_variable - 1) { - // Number of different values is correct, now ensure that the effect value - // was the one missing - unique_cond.second.insert(effect_conditions.second.first); - num_values_for_cond = unique_cond.second.size(); - if (num_values_for_cond == num_values_of_variable) { - is_surely_reached_by_var = true; - } - } - // If one of the condition variables does not fulfill the criteria, the effect - // is not certain to happen - if (!is_surely_reached_by_var) - is_always_reached = false; + } + return true; +} + +/* + Test whether the condition of a conditional effect is trivial, i.e. always + true. We test for the simple case that the same effect proposition is + triggered by a set of conditions of which one will always be true. This is for + example the case in Schedule, where the effect + (forall (?oldpaint - colour) + (when (painted ?x ?oldpaint) + (not (painted ?x ?oldpaint)))) + is translated by the translator to: + if oldpaint == blue, then not painted ?x, and + if oldpaint == red, then not painted ?x, etc. + Conditional effects that always happen are returned in `always_effects`. 
+*/ +static utils::HashSet get_effects_that_always_happen( + const VariablesProxy &variables, const EffectsProxy &effects) { + unordered_map effect_conditions_by_variable = + compute_effect_conditions_by_variable(effects); + + utils::HashSet always_effects; + for (const auto &[var, effect_conditions] : effect_conditions_by_variable) { + if (effect_always_happens(var, effect_conditions, variables)) { + always_effects.insert(FactPair(var, effect_conditions.value)); } - if (is_always_reached) - eff.insert(FactPair( - effect_conditions.first, effect_conditions.second.first)); } - return eff.empty(); + return always_effects; } /* @@ -259,17 +252,13 @@ static utils::HashSet get_effects_on_other_variables( const TaskProxy &task_proxy, int op_or_axiom_id, int var_id) { EffectsProxy effects = get_operator_or_axiom(task_proxy, op_or_axiom_id).get_effects(); - set trivially_conditioned_effects; - // TODO: Strange return value. - bool trivial_conditioned_effects_found = - effect_always_happens(task_proxy.get_variables(), effects, - trivially_conditioned_effects); + utils::HashSet trivially_conditioned_effects = + get_effects_that_always_happen(task_proxy.get_variables(), effects); utils::HashSet next_effect; for (const EffectProxy &effect : effects) { FactPair atom = effect.get_fact().get_pair(); if ((effect.get_conditions().empty() && atom.var != var_id) || - (trivial_conditioned_effects_found && - trivially_conditioned_effects.contains(atom))) { + trivially_conditioned_effects.contains(atom)) { next_effect.insert(atom); } } From dcf87cef5bd660250f63fb9e3f7b75a8eb5f43dc Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 22:49:10 +0100 Subject: [PATCH 050/108] Simplify collect_ancestors. 
--- .../landmark_factory_reasonable_orders_hps.cc | 42 +++++-------------- .../landmark_factory_reasonable_orders_hps.h | 2 - 2 files changed, 11 insertions(+), 33 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 9d7addcb65..7509d94ed1 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -9,6 +9,7 @@ #include "../utils/logging.h" #include "../utils/markup.h" +#include #include #include #include @@ -54,6 +55,16 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_goal_orderings( } } +static void collect_ancestors(unordered_set &result, + const LandmarkNode &node) { + for (const auto &[parent, type] : node.parents) { + if (type >= OrderingType::NATURAL && !result.contains(parent)) { + result.insert(parent); + collect_ancestors(result, *parent); + } + } +} + unordered_set LandmarkFactoryReasonableOrdersHPS::collect_reasonable_ordering_candidates( const LandmarkNode &node) { unordered_set interesting_nodes; @@ -384,37 +395,6 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( return false; } -void LandmarkFactoryReasonableOrdersHPS::collect_ancestors( - unordered_set &result, LandmarkNode &node) { - // Returns all ancestors in the landmark graph of landmark node "start". 
- - // There could be cycles if use_reasonable == true - list open_nodes; - unordered_set closed_nodes; - for (const auto &p : node.parents) { - LandmarkNode &parent = *(p.first); - const OrderingType &type = p.second; - if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { - open_nodes.push_back(&parent); - closed_nodes.insert(&parent); - result.insert(&parent); - } - } - while (!open_nodes.empty()) { - LandmarkNode &node2 = *(open_nodes.front()); - for (const auto &p : node2.parents) { - LandmarkNode &parent = *(p.first); - const OrderingType &type = p.second; - if (type >= OrderingType::NATURAL && closed_nodes.count(&parent) == 0) { - open_nodes.push_back(&parent); - closed_nodes.insert(&parent); - result.insert(&parent); - } - } - open_nodes.pop_front(); - } -} - bool LandmarkFactoryReasonableOrdersHPS::supports_conditional_effects() const { return landmark_factory->supports_conditional_effects(); } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 7500c60a59..2cc8645ae4 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -30,8 +30,6 @@ class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { bool interferes( const TaskProxy &task_proxy, const Landmark &landmark_a, const Landmark &landmark_b) const; - void collect_ancestors( - std::unordered_set &result, LandmarkNode &node); public: LandmarkFactoryReasonableOrdersHPS( const std::shared_ptr &lm_factory, From 2d276f486c1f83e882b5e6d3416c166e5fccbdab Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 22:50:50 +0100 Subject: [PATCH 051/108] Turn function static. 
--- src/search/landmarks/landmark_factory_reasonable_orders_hps.cc | 2 +- src/search/landmarks/landmark_factory_reasonable_orders_hps.h | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 7509d94ed1..166b6257e7 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -65,7 +65,7 @@ static void collect_ancestors(unordered_set &result, } } -unordered_set LandmarkFactoryReasonableOrdersHPS::collect_reasonable_ordering_candidates( +static unordered_set collect_reasonable_ordering_candidates( const LandmarkNode &node) { unordered_set interesting_nodes; for (const auto &[child, type] : node.children) { diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 2cc8645ae4..5fe86a7f90 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -14,8 +14,6 @@ class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { void approximate_goal_orderings( const TaskProxy &task_proxy, LandmarkNode &node) const; - std::unordered_set collect_reasonable_ordering_candidates( - const LandmarkNode &node); void insert_reasonable_orderings( const TaskProxy &task_proxy, const std::unordered_set &candidates, From 39689bb54a42f4a83d58eaf08e692ab414c063c5 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 18 Mar 2025 22:52:06 +0100 Subject: [PATCH 052/108] Improve docs. 
--- src/search/landmarks/landmark_factory_reasonable_orders_hps.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 166b6257e7..4737e11c1b 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -417,7 +417,7 @@ class LandmarkFactoryReasonableOrdersHPSFeature document_note( "Obedient-reasonable orders", - "Hoffmann et al. (2004) suggest obedient-reasonable orders in " + "Hoffmann et al. (2004) suggest obedient-reasonable orderings in " "addition to reasonable orders. Obedient-reasonable orders were " "later also used by the LAMA planner (Richter and Westphal, 2010). " "They are \"reasonable orders\" under the assumption that all " From 6081030b20b186a71104d9c123fd095f02426469 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 19 Mar 2025 10:34:23 +0100 Subject: [PATCH 053/108] Clean up relaxation factory. 
--- .../landmarks/landmark_factory_relaxation.cc | 60 +++++++++---------- .../landmarks/landmark_factory_relaxation.h | 20 +++---- .../landmarks/landmark_factory_rpg_exhaust.cc | 16 +++++ 3 files changed, 52 insertions(+), 44 deletions(-) diff --git a/src/search/landmarks/landmark_factory_relaxation.cc b/src/search/landmarks/landmark_factory_relaxation.cc index 6b1407df81..5b8c8ff238 100644 --- a/src/search/landmarks/landmark_factory_relaxation.cc +++ b/src/search/landmarks/landmark_factory_relaxation.cc @@ -13,7 +13,8 @@ LandmarkFactoryRelaxation::LandmarkFactoryRelaxation( : LandmarkFactory(verbosity) { } -void LandmarkFactoryRelaxation::generate_landmarks(const shared_ptr &task) { +void LandmarkFactoryRelaxation::generate_landmarks( + const shared_ptr &task) { TaskProxy task_proxy(*task); Exploration exploration(task_proxy, log); generate_relaxed_landmarks(task, exploration); @@ -26,46 +27,41 @@ void LandmarkFactoryRelaxation::postprocess( calc_achievers(task_proxy, exploration); } +void LandmarkFactoryRelaxation::compute_possible_achievers( + Landmark &landmark, const VariablesProxy &variables) { + for (const FactPair &atom : landmark.atoms) { + const vector &ops = get_operators_including_effect(atom); + landmark.possible_achievers.insert(ops.begin(), ops.end()); + + if (variables[atom.var].is_derived()) { + landmark.is_derived = true; + } + } +} + +static void compute_first_achievers( + Landmark &landmark, const vector> &reached, + const TaskProxy &task_proxy) { + for (int op_or_axom_id : landmark.possible_achievers) { + OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axom_id); + + if (possibly_reaches_landmark(op, reached, landmark)) { + landmark.first_achievers.insert(op_or_axom_id); + } + } +} + void LandmarkFactoryRelaxation::calc_achievers( const TaskProxy &task_proxy, Exploration &exploration) { assert(!achievers_calculated); VariablesProxy variables = task_proxy.get_variables(); for (const auto &node : *landmark_graph) { Landmark &landmark = 
node->get_landmark(); - for (const FactPair &atom : landmark.atoms) { - const vector &ops = get_operators_including_effect(atom); - landmark.possible_achievers.insert(ops.begin(), ops.end()); - - if (variables[atom.var].is_derived()) - landmark.is_derived = true; - } - + compute_possible_achievers(landmark, variables); vector> reached = exploration.compute_relaxed_reachability(landmark.atoms, false); - - for (int op_or_axom_id : landmark.possible_achievers) { - OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axom_id); - - if (possibly_reaches_landmark(op, reached, landmark)) { - landmark.first_achievers.insert(op_or_axom_id); - } - } + compute_first_achievers(landmark, reached, task_proxy); } achievers_calculated = true; } - -// TODO: Move this to lm_exhaust and make it a static function. -bool LandmarkFactoryRelaxation::relaxed_task_solvable( - const TaskProxy &task_proxy, Exploration &exploration, - const Landmark &landmark, const bool use_unary_relaxation) { - vector> reached = exploration.compute_relaxed_reachability( - landmark.atoms, use_unary_relaxation); - - for (FactProxy goal : task_proxy.get_goals()) { - if (!reached[goal.get_variable().get_id()][goal.get_value()]) { - return false; - } - } - return true; -} } diff --git a/src/search/landmarks/landmark_factory_relaxation.h b/src/search/landmarks/landmark_factory_relaxation.h index d03169d2b1..97fcd27ea5 100644 --- a/src/search/landmarks/landmark_factory_relaxation.h +++ b/src/search/landmarks/landmark_factory_relaxation.h @@ -7,23 +7,19 @@ namespace landmarks { class Exploration; class LandmarkFactoryRelaxation : public LandmarkFactory { -protected: - explicit LandmarkFactoryRelaxation(utils::Verbosity verbosity); - - /* Test whether the relaxed planning task is solvable without - achieving the excluded landmark. 
*/ - bool relaxed_task_solvable( - const TaskProxy &task_proxy, Exploration &exploration, - const Landmark &landmark, bool use_unary_relaxation); - -private: void generate_landmarks(const std::shared_ptr &task) override; - virtual void generate_relaxed_landmarks(const std::shared_ptr &task, - Exploration &exploration) = 0; + virtual void generate_relaxed_landmarks( + const std::shared_ptr &task, + Exploration &exploration) = 0; void postprocess(const TaskProxy &task_proxy, Exploration &exploration); + void compute_possible_achievers( + Landmark &landmark, const VariablesProxy &variables); void calc_achievers(const TaskProxy &task_proxy, Exploration &exploration); + +protected: + explicit LandmarkFactoryRelaxation(utils::Verbosity verbosity); }; } diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index 6dc7d8af35..893a30cb2f 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -1,5 +1,6 @@ #include "landmark_factory_rpg_exhaust.h" +#include "exploration.h" #include "landmark.h" #include "landmark_graph.h" @@ -22,6 +23,21 @@ LandmarkFactoryRpgExhaust::LandmarkFactoryRpgExhaust( use_unary_relaxation(use_unary_relaxation) { } +// Test whether the goal is reachable without achieving `landmark`. 
+static bool relaxed_task_solvable( + const TaskProxy &task_proxy, Exploration &exploration, + const Landmark &landmark, const bool use_unary_relaxation) { + vector> reached = exploration.compute_relaxed_reachability( + landmark.atoms, use_unary_relaxation); + + for (FactProxy goal : task_proxy.get_goals()) { + if (!reached[goal.get_variable().get_id()][goal.get_value()]) { + return false; + } + } + return true; +} + void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( const shared_ptr &task, Exploration &exploration) { TaskProxy task_proxy(*task); From 1387bc6db5e8e111f43fb7dc67694e38ac0089f6 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 19 Mar 2025 10:45:37 +0100 Subject: [PATCH 054/108] Clean up lm_exhaust. --- .../landmarks/landmark_factory_rpg_exhaust.cc | 39 ++++++++++++------- .../landmarks/landmark_factory_rpg_exhaust.h | 8 +++- 2 files changed, 31 insertions(+), 16 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index 893a30cb2f..7e5ef498c1 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -10,12 +10,15 @@ #include "../utils/logging.h" #include + using namespace std; namespace landmarks { -/* Problem: We don't get any orders here. (All we have is the reasonable orders - that are inferred later.) It's thus best to combine this landmark generation - method with others, don't use it by itself. */ +/* + Problem: We don't get any orderings here. (Reasonable orderings can also not + be inferred later in the absence of natural orderings.) It's thus best to + combine this landmark generation method with others, don't use it by itself. 
+*/ LandmarkFactoryRpgExhaust::LandmarkFactoryRpgExhaust( bool use_unary_relaxation, utils::Verbosity verbosity) @@ -38,19 +41,16 @@ static bool relaxed_task_solvable( return true; } -void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( - const shared_ptr &task, Exploration &exploration) { - TaskProxy task_proxy(*task); - if (log.is_at_least_normal()) { - log << "Generating landmarks by testing all facts with RPG method" << endl; - } - - // insert goal landmarks and mark them as goals +void LandmarkFactoryRpgExhaust::generate_goal_landmarks( + const TaskProxy &task_proxy) { for (FactProxy goal : task_proxy.get_goals()) { Landmark landmark({goal.get_pair()}, false, false, true); landmark_graph->add_landmark(move(landmark)); } - // test all other possible facts +} + +void LandmarkFactoryRpgExhaust::generate_all_simple_landmarks( + const TaskProxy &task_proxy, Exploration &exploration) const { for (VariableProxy var : task_proxy.get_variables()) { for (int value = 0; value < var.get_domain_size(); ++value) { const FactPair atom(var.get_id(), value); @@ -65,6 +65,17 @@ void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( } } +void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( + const shared_ptr &task, Exploration &exploration) { + TaskProxy task_proxy(*task); + if (log.is_at_least_normal()) { + log << "Generating landmarks by testing all atoms with RPG method" + << endl; + } + generate_goal_landmarks(task_proxy); + generate_all_simple_landmarks(task_proxy, exploration); +} + bool LandmarkFactoryRpgExhaust::supports_conditional_effects() const { return false; } @@ -75,7 +86,7 @@ class LandmarkFactoryRpgExhaustFeature LandmarkFactoryRpgExhaustFeature() : TypedFeature("lm_exhaust") { document_title("Exhaustive Landmarks"); document_synopsis( - "Exhaustively checks for each fact if it is a landmark." + "Exhaustively checks for each atom if it is a landmark." 
"This check is done using relaxed planning."); add_option( @@ -83,7 +94,7 @@ class LandmarkFactoryRpgExhaustFeature "compute landmarks of the unary relaxation, i.e., landmarks " "for the delete relaxation of a task transformation such that each " "operator is split into one operator for each of its effects. This " - "kind of landmark was previously known as \"causal landmarks\". " + "kind of landmarks was previously known as \"causal landmarks\". " "Setting the option to true can reduce the overall number of " "landmarks, which can make the search more memory-efficient but " "potentially less informative.", diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.h b/src/search/landmarks/landmark_factory_rpg_exhaust.h index 4553b04b37..2916c7c8ef 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.h +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.h @@ -6,8 +6,12 @@ namespace landmarks { class LandmarkFactoryRpgExhaust : public LandmarkFactoryRelaxation { const bool use_unary_relaxation; - virtual void generate_relaxed_landmarks(const std::shared_ptr &task, - Exploration &exploration) override; + void generate_goal_landmarks(const TaskProxy &task_proxy); + void generate_all_simple_landmarks( + const TaskProxy &task_proxy, Exploration &exploration) const; + virtual void generate_relaxed_landmarks( + const std::shared_ptr &task, + Exploration &exploration) override; public: explicit LandmarkFactoryRpgExhaust(bool use_unary_relaxation, From 8b410b10a7a37eeacf5ee4449c083cce54a89fe9 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 08:18:16 +0100 Subject: [PATCH 055/108] Refactor computing DTG successors and fix intersection method. 
--- .../landmark_factory_reasonable_orders_hps.cc | 20 ++-- .../landmarks/landmark_factory_rpg_sasp.cc | 112 ++++++++++-------- .../landmarks/landmark_factory_rpg_sasp.h | 11 +- 3 files changed, 86 insertions(+), 57 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 4737e11c1b..d1a90a02c5 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -276,13 +276,16 @@ static utils::HashSet get_effects_on_other_variables( return next_effect; } -static void intersect_inplace(utils::HashSet &set, - const utils::HashSet &other) { - for (const FactPair &atom : other) { - if (!set.contains(atom)) { - set.erase(atom); +static utils::HashSet get_intersection( + const utils::HashSet &set1, + const utils::HashSet &set2) { + utils::HashSet intersection; + for (const FactPair &atom : set1) { + if (set2.contains(atom)) { + intersection.insert(atom); } } + return intersection; } utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_of_achievers( @@ -299,7 +302,7 @@ utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_ swap(shared_effects, effect); init = false; } else { - intersect_inplace(shared_effects, effect); + shared_effects = get_intersection(shared_effects, effect); } if (shared_effects.empty()) { @@ -340,10 +343,11 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( if (landmark_a.is_conjunctive) { return false; } - utils::HashSet shared_effect = + utils::HashSet shared_effects = get_shared_effects_of_achievers(atom_a, task_proxy); return ranges::any_of( - shared_effect.begin(), shared_effect.end(), [&](const FactPair &atom) { + shared_effects.begin(), shared_effects.end(), + [&](const FactPair &atom) { const FactProxy &e = variables[atom.var].get_fact(atom.value); return e != a && e != b && e.is_mutex(b); }); diff --git 
a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index e7fb873faa..cd915f14c5 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -19,17 +19,17 @@ using namespace std; using utils::ExitCode; namespace landmarks { -static unordered_map _intersect( - const unordered_map &a, const unordered_map &b) { - if (a.size() > b.size()) - return _intersect(b, a); - unordered_map result; - for (const auto &pair_a : a) { - const auto it_b = b.find(pair_a.first); - if (it_b != b.end() && it_b->second == pair_a.second) - result.insert(pair_a); - } - return result; +/* TODO: Can we combine this with the intersection defined in reasonable + order factory? */ +static unordered_map get_intersection( + const unordered_map &map1, const unordered_map &map2) { + unordered_map intersection; + for (auto [key, value] : map1) { + if (map2.contains(key) && map2.at(key) == value) { + intersection[key] = value; + } + } + return intersection; } LandmarkFactoryRpgSasp::LandmarkFactoryRpgSasp( @@ -39,51 +39,68 @@ LandmarkFactoryRpgSasp::LandmarkFactoryRpgSasp( use_orders(use_orders) { } -void LandmarkFactoryRpgSasp::build_dtg_successors(const TaskProxy &task_proxy) { - // resize data structure +void LandmarkFactoryRpgSasp::resize_dtg_data_structures( + const TaskProxy &task_proxy) { VariablesProxy variables = task_proxy.get_variables(); dtg_successors.resize(variables.size()); - for (VariableProxy var : variables) + for (VariableProxy var : variables) { dtg_successors[var.get_id()].resize(var.get_domain_size()); + } +} - for (OperatorProxy op : task_proxy.get_operators()) { - // build map for precondition - unordered_map precondition_map; - for (FactProxy precondition : op.get_preconditions()) - precondition_map[precondition.get_variable().get_id()] = precondition.get_value(); +void LandmarkFactoryRpgSasp::compute_dtg_successors( + const EffectProxy &effect, + const 
std::unordered_map &preconditions, + const std::unordered_map &effect_conditions) { + /* If the operator can change the value of `var` from `pre` to + `post`, we insert `post` into `dtg_successors[var][pre]`. */ + auto [var, post] = effect.get_fact().get_pair(); + if (preconditions.contains(var)) { + int pre = preconditions.at(var); + if (effect_conditions.contains(var) && + effect_conditions.at(var) != pre) { + // The precondition conflicts with the effect condition. + return; + } + add_dtg_successor(var, pre, post); + } else if (effect_conditions.contains(var)) { + add_dtg_successor(var, effect_conditions.at(var), post); + } else { + int domain_size = + effect.get_fact().get_variable().get_domain_size(); + for (int pre = 0; pre < domain_size; ++pre) { + add_dtg_successor(var, pre, post); + } + } +} - for (EffectProxy effect : op.get_effects()) { - // build map for effect condition - unordered_map eff_condition; - for (FactProxy effect_condition : effect.get_conditions()) - eff_condition[effect_condition.get_variable().get_id()] = effect_condition.get_value(); +static unordered_map build_conditions_map( + const ConditionsProxy &conditions) { + unordered_map condition_map; + for (FactProxy condition : conditions) { + condition_map[condition.get_variable().get_id()] = + condition.get_value(); + } + return condition_map; +} - // whenever the operator can change the value of a variable from pre to - // post, we insert post into dtg_successors[var_id][pre] - FactProxy effect_fact = effect.get_fact(); - int var_id = effect_fact.get_variable().get_id(); - int post = effect_fact.get_value(); - if (precondition_map.count(var_id)) { - int pre = precondition_map[var_id]; - if (eff_condition.count(var_id) && eff_condition[var_id] != pre) - continue; // confliction pre- and effect condition - add_dtg_successor(var_id, pre, post); - } else { - if (eff_condition.count(var_id)) { - add_dtg_successor(var_id, eff_condition[var_id], post); - } else { - int dom_size = 
effect_fact.get_variable().get_domain_size(); - for (int pre = 0; pre < dom_size; ++pre) - add_dtg_successor(var_id, pre, post); - } - } +void LandmarkFactoryRpgSasp::build_dtg_successors(const TaskProxy &task_proxy) { + resize_dtg_data_structures(task_proxy); + for (OperatorProxy op : task_proxy.get_operators()) { + unordered_map preconditions = + build_conditions_map(op.get_preconditions()); + for (EffectProxy effect : op.get_effects()) { + unordered_map effect_conditions = + build_conditions_map(effect.get_conditions()); + compute_dtg_successors(effect, preconditions, effect_conditions); } } } void LandmarkFactoryRpgSasp::add_dtg_successor(int var_id, int pre, int post) { - if (pre != post) + if (pre != post) { dtg_successors[var_id][pre].insert(post); + } } /* @@ -153,8 +170,9 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_landmark( if (init) { init = false; intersection = current_cond; - } else - intersection = _intersect(intersection, current_cond); + } else { + intersection = get_intersection(intersection, current_cond); + } } } result.insert(intersection.begin(), intersection.end()); @@ -282,7 +300,7 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( init = false; shared_pre = next_pre; } else - shared_pre = _intersect(shared_pre, next_pre); + shared_pre = get_intersection(shared_pre, next_pre); } } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index adaaf20fb1..09b8d5f1dc 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -14,15 +14,21 @@ namespace landmarks { class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const bool disjunctive_landmarks; const bool use_orders; + // TODO: Use std::deque instead? 
std::list open_landmarks; std::vector> disjunction_classes; std::unordered_map> forward_orders; - // dtg_successors[var_id][val] contains all successor values of val in the - // domain transition graph for the variable + /* The entry `dtg_successors[var][val]` contains all successor values of the + atom var->val in the domain transition graph (aka atomic projection). */ std::vector>> dtg_successors; + void resize_dtg_data_structures(const TaskProxy &task_proxy); + void compute_dtg_successors( + const EffectProxy &effect, + const std::unordered_map &preconditions, + const std::unordered_map &effect_conditions); void build_dtg_successors(const TaskProxy &task_proxy); void add_dtg_successor(int var_id, int pre, int post); void find_forward_orders(const VariablesProxy &variables, @@ -55,6 +61,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { void approximate_lookahead_orders( const TaskProxy &task_proxy, const std::vector> &reached, LandmarkNode *node); + // TODO: Rename this function. bool domain_connectivity(const State &initial_state, const FactPair &landmark, const std::unordered_set &exclude); From 48ec72556d01a07b1e663a4ae067f57e88e3eff1 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 10:05:11 +0100 Subject: [PATCH 056/108] Break apart approximation of landmark preconditions. --- .../landmarks/landmark_factory_rpg_sasp.cc | 140 ++++++++++-------- .../landmarks/landmark_factory_rpg_sasp.h | 4 - 2 files changed, 81 insertions(+), 63 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index cd915f14c5..5c88cae4af 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -103,81 +103,102 @@ void LandmarkFactoryRpgSasp::add_dtg_successor(int var_id, int pre, int post) { } } -/* - Compute a subset of the actual preconditions of `op` for achieving `landmark`. 
- It takes into account operator preconditions, but only reports those effect - conditions that are true for ALL effects achieving the landmark. - TODO: Make this comment clearer. -*/ -void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_landmark( - const TaskProxy &task_proxy, const Landmark &landmark, - const OperatorProxy &op, unordered_map &result) const { - vector has_precondition_on_var(task_proxy.get_variables().size(), false); +// Returns the set of variables occurring in the precondition. +static unordered_set add_preconditions( + const OperatorProxy &op, unordered_map &result) { + unordered_set precondition_variables; for (FactProxy precondition : op.get_preconditions()) { - result.emplace(precondition.get_variable().get_id(), precondition.get_value()); - has_precondition_on_var[precondition.get_variable().get_id()] = true; + result[precondition.get_variable().get_id()] = precondition.get_value(); + precondition_variables.insert(precondition.get_variable().get_id()); } + return precondition_variables; +} - // If there is an effect but no precondition on a variable v with domain - // size 2 and initially the variable has the other value than required by - // the landmark then at the first time the landmark is reached the - // variable must still have the initial value. +/* + For all binary variables, if there is an effect but no precondition on + that variable and if the initial value differs from the value in the + landmark, then the variable still has its initial value right before + it is reached for the first time. 
+*/ +static void add_binary_variable_conditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const EffectsProxy &effects, + const unordered_set &precondition_variables, + unordered_map &result) { State initial_state = task_proxy.get_initial_state(); - EffectsProxy effects = op.get_effects(); for (EffectProxy effect : effects) { - FactProxy effect_fact = effect.get_fact(); - int var_id = effect_fact.get_variable().get_id(); - if (!has_precondition_on_var[var_id] && - effect_fact.get_variable().get_domain_size() == 2) { + FactProxy effect_atom = effect.get_fact(); + int var_id = effect_atom.get_variable().get_id(); + if (!precondition_variables.contains(var_id) && + effect_atom.get_variable().get_domain_size() == 2) { for (const FactPair &atom : landmark.atoms) { if (atom.var == var_id && initial_state[var_id].get_value() != atom.value) { - result.emplace(var_id, initial_state[var_id].get_value()); + result[var_id] = initial_state[var_id].get_value(); break; } } } } +} - // Check if `landmark` could be achieved by conditional effects. - unordered_set achievable_atom_indices; - for (EffectProxy effect : effects) { - FactProxy effect_fact = effect.get_fact(); - for (size_t j = 0; j < landmark.atoms.size(); ++j) - if (landmark.atoms[j] == effect_fact.get_pair()) - achievable_atom_indices.insert(j); - } - // Intersect effect conditions of all effects that can achieve lmp +static void add_effect_conditions( + const Landmark &landmark, const EffectsProxy &effects, + unordered_map &result) { + // Intersect effect conditions of all effects that can achieve `landmark`. 
unordered_map intersection; bool init = true; - for (int index : achievable_atom_indices) { - for (EffectProxy effect : effects) { - FactProxy effect_fact = effect.get_fact(); - if (!init && intersection.empty()) - break; - unordered_map current_cond; - if (landmark.atoms[index] == effect_fact.get_pair()) { - EffectConditionsProxy effect_conditions = effect.get_conditions(); - if (effect_conditions.empty()) { - intersection.clear(); - break; - } else { - for (FactProxy effect_condition : effect_conditions) - current_cond.emplace(effect_condition.get_variable().get_id(), - effect_condition.get_value()); - } - } - if (init) { - init = false; - intersection = current_cond; - } else { - intersection = get_intersection(intersection, current_cond); - } + for (const EffectProxy &effect : effects) { + const FactPair &effect_atom = effect.get_fact().get_pair(); + if (!landmark.contains(effect_atom)) { + continue; + } + if (effect.get_conditions().empty()) { + /* Landmark is reached unconditionally, no effect conditions + need to be added. */ + return; + } + + unordered_map effect_condition; + for (FactProxy atom : effect.get_conditions()) { + effect_condition[atom.get_variable().get_id()] = atom.get_value(); + } + if (init) { + swap(intersection, effect_condition); + init = false; + } else { + intersection = get_intersection(intersection, effect_condition); + } + + if (intersection.empty()) { + assert(!init); + break; } } result.insert(intersection.begin(), intersection.end()); } +/* + Collects conditions that must hold in all states in which `op` is + applicable and potentially reaches `landmark` when applied. These are + (1) preconditions of `op`, + (2) inverse values of binary variables if the landmark does not hold + initially, and + (3) shared effect conditions of all conditional effects that achieve + the landmark. 
+*/ +static unordered_map approximate_preconditions_to_achieve_landmark( + const TaskProxy &task_proxy, const Landmark &landmark, + const OperatorProxy &op) { + unordered_map result; + unordered_set precondition_variables = add_preconditions(op, result); + EffectsProxy effects = op.get_effects(); + add_binary_variable_conditions( + task_proxy, landmark, effects, precondition_variables, result); + add_effect_conditions(landmark, effects, result); + return result; +} + void LandmarkFactoryRpgSasp::found_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type) { if (landmark_graph->contains_simple_landmark(atom)) { @@ -293,9 +314,9 @@ void LandmarkFactoryRpgSasp::compute_shared_preconditions( break; if (possibly_reaches_landmark(op, reached, landmark)) { - unordered_map next_pre; - get_greedy_preconditions_for_landmark(task_proxy, landmark, - op, next_pre); + unordered_map next_pre = + approximate_preconditions_to_achieve_landmark( + task_proxy, landmark, op); if (init) { init = false; shared_pre = next_pre; @@ -401,8 +422,9 @@ void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_ids[i]); if (possibly_reaches_landmark(op, reached, landmark)) { ++num_ops; - unordered_map next_pre; - get_greedy_preconditions_for_landmark(task_proxy, landmark, op, next_pre); + unordered_map next_pre = + approximate_preconditions_to_achieve_landmark( + task_proxy, landmark, op); for (const auto &pre : next_pre) { int disj_class = disjunction_classes[pre.first][pre.second]; if (disj_class == -1) { diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 09b8d5f1dc..e53ce5323b 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -36,10 +36,6 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { LandmarkNode *node); void 
add_landmark_forward_orderings(); - void get_greedy_preconditions_for_landmark( - const TaskProxy &task_proxy, const Landmark &landmark, - const OperatorProxy &op, - std::unordered_map &result) const; void compute_shared_preconditions( const TaskProxy &task_proxy, std::unordered_map &shared_pre, From 463c6041e1d9f57721cefd9a55aea6c068883d2a Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 14:58:44 +0100 Subject: [PATCH 057/108] Break apart adding predecessor landmark. --- src/search/landmarks/landmark_factory.cc | 17 +- src/search/landmarks/landmark_factory.h | 2 +- src/search/landmarks/landmark_factory_h_m.cc | 4 +- .../landmarks/landmark_factory_merged.cc | 2 +- .../landmark_factory_reasonable_orders_hps.cc | 4 +- .../landmarks/landmark_factory_rpg_sasp.cc | 202 ++++++++++-------- .../landmarks/landmark_factory_rpg_sasp.h | 18 +- .../landmarks/landmark_factory_zhu_givan.cc | 2 +- 8 files changed, 141 insertions(+), 110 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 888ead7622..55c7686568 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -55,12 +55,19 @@ void LandmarkFactory::compute_operators_providing_effect( static bool weaker_ordering_exists( LandmarkNode &from, LandmarkNode &to, OrderingType type) { - auto it = from.children.find(&to); - if (it == from.children.end()) { - return false; + unordered_map::iterator it; + if (from.children.size() < to.parents.size()) { + it = from.children.find(&to); + if (it == from.children.end()) { + return false; + } } else { - return it->second < type; + it = to.parents.find(&from); + if (it == to.parents.end()) { + return false; + } } + return it->second < type; } static void remove_ordering(LandmarkNode &from, LandmarkNode &to) { @@ -83,7 +90,7 @@ void LandmarkFactory::add_ordering( /* Adds an ordering in the landmark graph. 
If an ordering between the same landmarks is already present, the stronger ordering type wins. */ -void LandmarkFactory::add_ordering_or_replace_if_stronger( +void LandmarkFactory::add_or_replace_ordering_if_stronger( LandmarkNode &from, LandmarkNode &to, OrderingType type) const { // TODO: Understand why self-loops are not allowed. assert(&from != &to); diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index 67ae4fb984..29aaea2ff9 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -40,7 +40,7 @@ class LandmarkFactory { explicit LandmarkFactory(utils::Verbosity verbosity); - void add_ordering_or_replace_if_stronger( + void add_or_replace_ordering_if_stronger( LandmarkNode &from, LandmarkNode &to, OrderingType type) const; void discard_all_orderings() const; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 92dd7a4adf..d35dd22011 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -996,12 +996,12 @@ void LandmarkFactoryHM::generate_landmarks( assert(landmark_node_table.find(landmark) != landmark_node_table.end()); assert(landmark_node_table.find(set_index) != landmark_node_table.end()); - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *landmark_node_table[landmark], *landmark_node_table[set_index], OrderingType::NATURAL); } for (int gn : h_m_table_[set_index].necessary) { - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *landmark_node_table[gn], *landmark_node_table[set_index], OrderingType::GREEDY_NECESSARY); } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 996a5d96b4..58dc70571b 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -129,7 +129,7 @@ void 
LandmarkFactoryMerged::add_landmark_orderings( LandmarkNode *to_new = get_matching_landmark(to_old->get_landmark()); if (to_new) { - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *from_new, *to_new, type); } else if (log.is_at_least_normal()) { log << "Discarded to ordering" << endl; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index d1a90a02c5..a5c1c5bb81 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -49,7 +49,7 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_goal_orderings( continue; } if (interferes(task_proxy, other_landmark, landmark)) { - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *other, node, OrderingType::REASONABLE); } } @@ -107,7 +107,7 @@ void LandmarkFactoryReasonableOrdersHPS::insert_reasonable_orderings( others as well (transitivity), but it could be interesting to test the effect of doing so, for example for the cycle heuristic. */ - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *other, node, OrderingType::REASONABLE); } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 5c88cae4af..405c91e865 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -13,6 +13,7 @@ #include #include +#include #include using namespace std; @@ -199,102 +200,117 @@ static unordered_map approximate_preconditions_to_achieve_landmark( return result; } -void LandmarkFactoryRpgSasp::found_simple_landmark_and_ordering( +/* Remove all pointers to `disjunctive_landmark_node` from internal data + structures (i.e., the list of open landmarks and forward orders). 
*/ +void LandmarkFactoryRpgSasp::remove_occurrences_of_landmark_node( + const LandmarkNode *node) { + auto it = find(open_landmarks.begin(), open_landmarks.end(), node); + if (it != open_landmarks.end()) { + open_landmarks.erase(it); + } + forward_orders.erase(node); +} + +static vector get_natural_parents(const LandmarkNode *node) { + // Retrieve incoming orderings from `disjunctive_landmark_node`. + vector parents; + parents.reserve(node->parents.size()); + assert(all_of(node->parents.begin(), node->parents.end(), + [](const pair &parent) { + return parent.second >= OrderingType::NATURAL; + })); + for (auto &parent : views::keys(node->parents)) { + parents.push_back(parent); + } + return parents; +} + +void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( + LandmarkNode &simple_landmark_node) { + /* + In issue1004, we fixed a bug in this part of the code. It now + removes the disjunctive landmark along with all its orderings from + the landmark graph and adds a new simple landmark node. Before + this change, incoming orderings were maintained, which is not + always correct for greedy-necessary orderings. We now replace + those incoming orderings with natural orderings. + */ + const Landmark &landmark = simple_landmark_node.get_landmark(); + assert(!landmark.is_conjunctive); + assert(!landmark.is_disjunctive); + assert(landmark.atoms.size() == 1); + LandmarkNode *disjunctive_landmark_node = + &landmark_graph->get_disjunctive_landmark_node(landmark.atoms[0]); + remove_occurrences_of_landmark_node(disjunctive_landmark_node); + vector parents = + get_natural_parents(disjunctive_landmark_node); + landmark_graph->remove_node(disjunctive_landmark_node); + /* Add incoming orderings of replaced `disjunctive_landmark_node` as + natural orderings to `simple_node`. 
*/ + for (LandmarkNode *parent : parents) { + add_or_replace_ordering_if_stronger( + *parent, simple_landmark_node, OrderingType::NATURAL); + } +} + +void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type) { if (landmark_graph->contains_simple_landmark(atom)) { LandmarkNode &simple_landmark = landmark_graph->get_simple_landmark_node(atom); - add_ordering_or_replace_if_stronger(simple_landmark, node, type); + add_or_replace_ordering_if_stronger(simple_landmark, node, type); return; } Landmark landmark({atom}, false, false); + LandmarkNode &simple_landmark_node = + landmark_graph->add_landmark(move (landmark)); + open_landmarks.push_back(&simple_landmark_node); + add_or_replace_ordering_if_stronger(simple_landmark_node, node, type); if (landmark_graph->contains_disjunctive_landmark(atom)) { - // In issue1004, we fixed a bug in this part of the code. It now removes - // the disjunctive landmark along with all its orderings from the - // landmark graph and adds a new simple landmark node. Before this - // change, incoming orderings were maintained, which is not always - // correct for greedy necessary orderings. We now replace those - // incoming orderings with natural orderings. - - // Simple landmarks are more informative than disjunctive ones, - // remove disj. landmark and add simple one - LandmarkNode *disjunctive_landmark_node = - &landmark_graph->get_disjunctive_landmark_node(atom); - - /* Remove all pointers to `disjunctive_landmark_node` from internal data - structures (i.e., the list of open landmarks and forward orders). */ - auto it = find(open_landmarks.begin(), open_landmarks.end(), disjunctive_landmark_node); - if (it != open_landmarks.end()) { - open_landmarks.erase(it); - } - forward_orders.erase(disjunctive_landmark_node); - - // Retrieve incoming orderings from `disjunctive_landmark_node`. 
- vector predecessors; - predecessors.reserve(disjunctive_landmark_node->parents.size()); - for (auto &pred : disjunctive_landmark_node->parents) { - predecessors.push_back(pred.first); - } - - // Remove `disjunctive_landmark_node` from landmark graph. - landmark_graph->remove_node(disjunctive_landmark_node); - - // Add simple landmark node. - LandmarkNode &simple_landmark = - landmark_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&simple_landmark); - add_ordering_or_replace_if_stronger(simple_landmark, node, type); - - /* Add incoming orderings of replaced `disjunctive_landmark_node` as - natural orderings to `simple_landmark`. */ - for (LandmarkNode *pred : predecessors) { - add_ordering_or_replace_if_stronger( - *pred, simple_landmark, OrderingType::NATURAL); - } - } else { - LandmarkNode &simple_landmark = - landmark_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&simple_landmark); - add_ordering_or_replace_if_stronger(simple_landmark, node, type); + // Simple landmarks are more informative than disjunctive ones. + remove_disjunctive_landmark_and_rewire_orderings(simple_landmark_node); } } -void LandmarkFactoryRpgSasp::found_disjunctive_landmark_and_ordering( - const TaskProxy &task_proxy, const set &atoms, - LandmarkNode &node, OrderingType type) { - bool simple_landmark_exists = false; - State initial_state = task_proxy.get_initial_state(); - for (const FactPair &atom : atoms) { - if (initial_state[atom.var].get_value() == atom.value) { - return; - } - if (landmark_graph->contains_simple_landmark(atom)) { - simple_landmark_exists = true; - break; - } +// Returns true if an overlapping landmark exists already. 
+bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( + const set &atoms, LandmarkNode &node, OrderingType type) const { + if (ranges::any_of(atoms.begin(), atoms.end(), [&](const FactPair &atom) { + return landmark_graph->contains_simple_landmark(atom); + })) { + /* + Do not add landmark because the simple one is stronger. Do not add the + ordering to the simple landmark(s) as they are not guaranteed to hold. + */ + return true; } - LandmarkNode *new_landmark_node; - if (simple_landmark_exists) { - // Note: don't add orders as we can't be sure that they're correct - return; - } else if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { + if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { - new_landmark_node = - &landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); - add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); - return; + LandmarkNode &new_landmark_node = + landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); + add_or_replace_ordering_if_stronger(new_landmark_node, node, type); } - // Landmark overlaps with existing disjunctive landmark, do not add. - return; + return true; + } + return false; +} + +void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( + const set &atoms, LandmarkNode &node, OrderingType type) { + assert(atoms.size() > 1); + bool overlaps = deal_with_overlapping_landmarks(atoms, node, type); + + /* Only add the landmark to the landmark graph if it does not + overlap with an existing landmark. 
*/ + if (!overlaps) { + Landmark landmark(vector(atoms.begin(), atoms.end()), + true, false); + LandmarkNode *new_landmark_node = + &landmark_graph->add_landmark(move(landmark)); + open_landmarks.push_back(new_landmark_node); + add_or_replace_ordering_if_stronger(*new_landmark_node, node, type); } - /* None of the atoms in this landmark occur in an existing landmark, so - we add the landmark to the landmark graph. */ - Landmark landmark(vector(atoms.begin(), - atoms.end()), true, false); - new_landmark_node = &landmark_graph->add_landmark(move(landmark)); - open_landmarks.push_back(new_landmark_node); - add_ordering_or_replace_if_stronger(*new_landmark_node, node, type); } void LandmarkFactoryRpgSasp::compute_shared_preconditions( @@ -498,10 +514,9 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( All such shared preconditions are landmarks, and greedy necessary predecessors of *landmark*. */ - for (const auto &pre : shared_pre) { - found_simple_landmark_and_ordering( - FactPair(pre.first, pre.second), *node, - OrderingType::GREEDY_NECESSARY); + for (auto [var, value] : shared_pre) { + add_simple_landmark_and_ordering(FactPair(var, value), *node, + OrderingType::GREEDY_NECESSARY); } // Extract additional orders from the relaxed planning graph and DTG. approximate_lookahead_orders(task_proxy, reached, node); @@ -512,10 +527,13 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( task_proxy, disjunctive_pre, reached, landmark); for (const auto &preconditions : disjunctive_pre) // We don't want disjunctive LMs to get too big. - if (preconditions.size() < 5) { - found_disjunctive_landmark_and_ordering( - task_proxy, preconditions, *node, - OrderingType::GREEDY_NECESSARY); + if (preconditions.size() < 5 && none_of(preconditions.begin() + , preconditions.end(), [&](const FactPair &atom) { + // TODO: Why not? 
+ return initial_state[atom.var].get_value() == atom.value; + })) { + add_disjunctive_landmark_and_ordering( + preconditions, *node, OrderingType::GREEDY_NECESSARY); } } } @@ -577,8 +595,8 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( initial state, we have found a new landmark. */ if (!domain_connectivity(initial_state, atom, exclude)) - found_simple_landmark_and_ordering(FactPair(atom.var, value), *node, - OrderingType::NATURAL); + add_simple_landmark_and_ordering( + FactPair(atom.var, value), *node, OrderingType::NATURAL); } } @@ -598,7 +616,7 @@ bool LandmarkFactoryRpgSasp::domain_connectivity( // If the value in the initial state is excluded, we won't achieve our goal value: if (exclude.find(initial_state[var].get_value()) != exclude.end()) return false; - list open; + deque open; unordered_set closed(initial_state[var].get_variable().get_domain_size()); closed = exclude; open.push_back(initial_state[var].get_value()); @@ -671,7 +689,7 @@ void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { if (landmark_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = landmark_graph->get_simple_landmark_node(node2_pair); - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *node, node2, OrderingType::NATURAL); } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index e53ce5323b..1bd5c1ee2c 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -5,7 +5,7 @@ #include "../utils/hash.h" -#include +#include #include #include #include @@ -15,7 +15,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const bool disjunctive_landmarks; const bool use_orders; // TODO: Use std::deque instead? 
- std::list open_landmarks; + std::deque open_landmarks; std::vector> disjunction_classes; std::unordered_map> forward_orders; @@ -49,11 +49,17 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { virtual void generate_relaxed_landmarks( const std::shared_ptr &task, Exploration &exploration) override; - void found_simple_landmark_and_ordering( + void remove_occurrences_of_landmark_node(const LandmarkNode *node); + void remove_disjunctive_landmark_and_rewire_orderings( + LandmarkNode &simple_landmark_node); + void add_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type); - void found_disjunctive_landmark_and_ordering( - const TaskProxy &task_proxy, const std::set &atoms, - LandmarkNode &node, OrderingType type); + // TODO: Can we use something different than set in the next two? + bool deal_with_overlapping_landmarks( + const std::set &atoms, LandmarkNode &node, + OrderingType type) const; + void add_disjunctive_landmark_and_ordering( + const std::set &atoms, LandmarkNode &node, OrderingType type); void approximate_lookahead_orders( const TaskProxy &task_proxy, const std::vector> &reached, LandmarkNode *node); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 6135303e4e..3db491d3fb 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -95,7 +95,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks( // Add order: lm ->_{nat} lm assert(node->parents.find(lm_node) == node->parents.end()); assert(lm_node->children.find(node) == lm_node->children.end()); - add_ordering_or_replace_if_stronger( + add_or_replace_ordering_if_stronger( *node, *lm_node, OrderingType::NATURAL); } } From 5a543ee163c60c95e46a175e437960aa37b4abc5 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 16:14:55 +0100 Subject: [PATCH 058/108] Clean up computing shared preconditions. 
--- .../landmarks/landmark_factory_rpg_sasp.cc | 41 ++++++++++--------- .../landmarks/landmark_factory_rpg_sasp.h | 11 +++-- 2 files changed, 28 insertions(+), 24 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 405c91e865..baff1b576d 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -313,34 +313,36 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( } } -void LandmarkFactoryRpgSasp::compute_shared_preconditions( - const TaskProxy &task_proxy, unordered_map &shared_pre, - vector> &reached, const Landmark &landmark) { - /* - Compute the shared preconditions of all operators that can potentially - achieve landmark bp, given the reachability in the relaxed planning graph. - */ +/* Compute the shared preconditions of all operators that can potentially + achieve `landmark`, given the reachability in the relaxed planning graph. 
*/ +unordered_map LandmarkFactoryRpgSasp::compute_shared_preconditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const vector> &reached) const { + unordered_map shared_preconditions; bool init = true; for (const FactPair &atom : landmark.atoms) { const vector &op_ids = get_operators_including_effect(atom); - for (int op_or_axiom_id : op_ids) { - OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); - if (!init && shared_pre.empty()) - break; - + OperatorProxy op = + get_operator_or_axiom(task_proxy, op_or_axiom_id); if (possibly_reaches_landmark(op, reached, landmark)) { - unordered_map next_pre = + unordered_map preconditions = approximate_preconditions_to_achieve_landmark( task_proxy, landmark, op); if (init) { + swap(shared_preconditions, preconditions); init = false; - shared_pre = next_pre; - } else - shared_pre = get_intersection(shared_pre, next_pre); + } else { + shared_preconditions = + get_intersection(shared_preconditions, preconditions); + } + if (shared_preconditions.empty()) { + return shared_preconditions; + } } } } + return shared_preconditions; } static string get_predicate_for_atom(const VariablesProxy &variables, @@ -507,14 +509,13 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( any precondition propositions that all such operators share (if there are any). */ - unordered_map shared_pre; - compute_shared_preconditions(task_proxy, shared_pre, - reached, landmark); + unordered_map shared_preconditions = + compute_shared_preconditions(task_proxy, landmark, reached); /* All such shared preconditions are landmarks, and greedy necessary predecessors of *landmark*. 
*/ - for (auto [var, value] : shared_pre) { + for (auto [var, value] : shared_preconditions) { add_simple_landmark_and_ordering(FactPair(var, value), *node, OrderingType::GREEDY_NECESSARY); } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 1bd5c1ee2c..d282a424b1 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -36,10 +36,13 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { LandmarkNode *node); void add_landmark_forward_orderings(); - void compute_shared_preconditions( - const TaskProxy &task_proxy, - std::unordered_map &shared_pre, - std::vector> &reached, const Landmark &landmark); + void update_shared_preconditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const OperatorProxy &op, + std::unordered_map &shared_preconditions) const; + std::unordered_map compute_shared_preconditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const std::vector> &reached) const; void compute_disjunctive_preconditions( const TaskProxy &task_proxy, std::vector> &disjunctive_pre, From 2f1d3a6b84313b1cd237a7a1e84e243ab2a25f45 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 16:56:45 +0100 Subject: [PATCH 059/108] Clean up building disjunction classes. 
--- .../landmarks/landmark_factory_rpg_sasp.cc | 71 ++++++++++--------- .../landmarks/landmark_factory_rpg_sasp.h | 2 +- 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index baff1b576d..504349214f 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -346,10 +346,11 @@ unordered_map LandmarkFactoryRpgSasp::compute_shared_preconditions( } static string get_predicate_for_atom(const VariablesProxy &variables, - int var_no, int value) { - const string atom_name = variables[var_no].get_fact(value).get_name(); - if (atom_name == "") + int var_id, int value) { + const string atom_name = variables[var_id].get_fact(value).get_name(); + if (atom_name == "") { return ""; + } int predicate_pos = 0; if (atom_name.substr(0, 5) == "Atom ") { predicate_pos = 5; @@ -361,34 +362,33 @@ static string get_predicate_for_atom(const VariablesProxy &variables, cerr << "Cannot extract predicate from atom: " << atom_name << endl; utils::exit_with(ExitCode::SEARCH_INPUT_ERROR); } - return string(atom_name.begin() + - predicate_pos, atom_name.begin() + paren_pos); + return { + atom_name.begin() + predicate_pos, + atom_name.begin() + static_cast(paren_pos) + }; } +/* + The RHW landmark generation method only allows disjunctive landmarks where all + atoms stem from the same PDDL predicate. This functionality is implemented in + this method. + + The approach we use is to map each atom (var/value pair) to an equivalence + class (representing all atoms with the same predicate). The special class "-1" + means "cannot be part of any disjunctive landmark". This is used for atoms + that do not belong to any predicate. + + Similar methods for restricting disjunctive landmarks could be implemented by + just changing this function, as long as the restriction could also be + implemented as an equivalence class. 
For example, issue384 suggests to simply + use the finite-domain variable ID as the equivalence class, which would be a + cleaner method than what we currently use since it doesn't care about where + the finite-domain representation comes from. (But of course making such a + change would require a performance evaluation.) +*/ void LandmarkFactoryRpgSasp::build_disjunction_classes( const TaskProxy &task_proxy) { - /* The RHW landmark generation method only allows disjunctive - landmarks where all atoms stem from the same PDDL predicate. - This functionality is implemented via this method. - - The approach we use is to map each atom (var/value pair) to an - equivalence class (representing all atoms with the same - predicate). The special class "-1" means "cannot be part of any - disjunctive landmark". This is used for atoms that do not - belong to any predicate. - - Similar methods for restricting disjunctive landmarks could be - implemented by just changing this function, as long as the - restriction could also be implemented as an equivalence class. - For example, we might simply use the finite-domain variable - number as the equivalence class, which would be a cleaner - method than what we currently use since it doesn't care about - where the finite-domain representation comes from. (But of - course making such a change would require a performance - evaluation.) 
- */ - - typedef map PredicateIndex; + typedef unordered_map PredicateIndex; PredicateIndex predicate_to_index; VariablesProxy variables = task_proxy.get_variables(); @@ -397,17 +397,18 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( int num_values = var.get_domain_size(); disjunction_classes[var.get_id()].reserve(num_values); for (int value = 0; value < num_values; ++value) { - string predicate = get_predicate_for_atom(variables, var.get_id(), value); - int disj_class; + string predicate = + get_predicate_for_atom(variables, var.get_id(), value); + int disjunction_class; if (predicate.empty()) { - disj_class = -1; + disjunction_class = -1; } else { - // Insert predicate into unordered_map or extract value that - // is already there. + /* Insert predicate into unordered_map or extract value + that is already there. */ pair entry(predicate, predicate_to_index.size()); - disj_class = predicate_to_index.insert(entry).first->second; + disjunction_class = predicate_to_index.insert(entry).first->second; } - disjunction_classes[var.get_id()].push_back(disj_class); + disjunction_classes[var.get_id()].push_back(disjunction_class); } } } @@ -415,7 +416,7 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( const TaskProxy &task_proxy, vector> &disjunctive_pre, - vector> &reached, const Landmark &landmark) { + vector> &reached, const Landmark &landmark) const { /* Compute disjunctive preconditions from all operators than can potentially achieve landmark bp, given the reachability in the relaxed planning graph. 
diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index d282a424b1..d3dc5753ee 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -47,7 +47,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const TaskProxy &task_proxy, std::vector> &disjunctive_pre, std::vector> &reached, - const Landmark &landmark); + const Landmark &landmark) const; virtual void generate_relaxed_landmarks( const std::shared_ptr &task, From 134aeaed0590eb03edbac0861e8c3a912a880f70 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 20 Mar 2025 18:11:10 +0100 Subject: [PATCH 060/108] Try to understand computation of disjunction classes. --- .../landmarks/landmark_factory_rpg_sasp.cc | 144 +++++++++++------- .../landmarks/landmark_factory_rpg_sasp.h | 16 +- 2 files changed, 94 insertions(+), 66 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 504349214f..2b5fa482fe 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -2,6 +2,7 @@ #include "exploration.h" #include "landmark.h" +#include "landmark_factory.h" #include "landmark_graph.h" #include "util.h" @@ -413,64 +414,89 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( } } -void LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( - const TaskProxy &task_proxy, - vector> &disjunctive_pre, - vector> &reached, const Landmark &landmark) const { - /* - Compute disjunctive preconditions from all operators than can potentially - achieve landmark bp, given the reachability in the relaxed planning graph. - A disj. precondition is a set of atoms which contains one precondition - atom from each of the operators, which we additionally restrict so that - each atom in the set stems from the same PDDL predicate. 
- */ - - vector op_or_axiom_ids; +vector LandmarkFactoryRpgSasp::get_operators_achieving_landmark( + const Landmark &landmark) const { + unordered_set op_ids; for (const FactPair &atom : landmark.atoms) { - const vector &tmp_op_or_axiom_ids = - get_operators_including_effect(atom); - for (int op_or_axiom_id : tmp_op_or_axiom_ids) - op_or_axiom_ids.push_back(op_or_axiom_id); + const vector &tmp_op_ids = get_operators_including_effect(atom); + op_ids.insert(tmp_op_ids.begin(), tmp_op_ids.end()); + } + return {op_ids.begin(), op_ids.end()}; +} + +void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( + const unordered_map &landmark_preconditions, int op_id, + unordered_map> &preconditions_by_disjunction_class, + unordered_map> &used_operators_by_disjunction_class) const { + for (const auto &[var, value] : landmark_preconditions) { + int disjunction_class = disjunction_classes[var][value]; + if (disjunction_class == -1) { + /* This atom may not participate in any disjunctive + landmarks since it has no associated predicate. */ + continue; + } + + /* Only deal with propositions that are not shared preconditions + (which have been found already and are simple landmarks). 
*/
+        FactPair precondition(var, value);
+        if (!landmark_graph->contains_simple_landmark(precondition)) {
+            preconditions_by_disjunction_class[disjunction_class].push_back(precondition);
+            used_operators_by_disjunction_class[disjunction_class].insert(op_id);
+        }
+    }
+}
+
+static vector> get_disjunctive_preconditions(
+    const unordered_map> &preconditions_by_disjunction_class,
+    const unordered_map> &used_operators_by_disjunction_class,
+    int num_ops) {
+    vector> disjunctive_preconditions;
+    for (const auto &[disjunction_class, atoms] : preconditions_by_disjunction_class) {
+        int used_operators = static_cast(
+            used_operators_by_disjunction_class.at(disjunction_class).size());
+        if (used_operators == num_ops) {
+            set preconditions;
+            preconditions.insert(atoms.begin(), atoms.end());
+            if (preconditions.size() > 1) {
+                disjunctive_preconditions.push_back(preconditions);
+            } // Otherwise this landmark is not actually a disjunctive landmark.
+        }
     }
+    return disjunctive_preconditions;
+}
+
+/*
+  Compute disjunctive preconditions from all operators that can potentially
+  achieve `landmark`, given the reachability in the relaxed planning graph.
+  A disjunctive precondition is a set of atoms which contains one precondition
+  atom from each of the operators, which we additionally restrict so that
+  each atom in the set stems from the same disjunction class. 
+*/ +vector> LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const vector> &reached) const { + vector op_or_axiom_ids = + get_operators_achieving_landmark(landmark); int num_ops = 0; - unordered_map> preconditions; // maps from - // pddl_proposition_indeces to props - unordered_map> used_operators; // tells for each - // proposition which operators use it - for (size_t i = 0; i < op_or_axiom_ids.size(); ++i) { - OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_ids[i]); + unordered_map> preconditions_by_disjunction_class; + unordered_map> used_operators_by_disjunction_class; + for (int op_id : op_or_axiom_ids) { + const OperatorProxy &op = + get_operator_or_axiom(task_proxy, op_id); if (possibly_reaches_landmark(op, reached, landmark)) { ++num_ops; - unordered_map next_pre = + unordered_map landmark_preconditions = approximate_preconditions_to_achieve_landmark( task_proxy, landmark, op); - for (const auto &pre : next_pre) { - int disj_class = disjunction_classes[pre.first][pre.second]; - if (disj_class == -1) { - /* This atom may not participate in any disjunctive - landmarks since it has no associated predicate. */ - continue; - } - - // Only deal with propositions that are not shared preconditions - // (those have been found already and are simple landmarks). 
- const FactPair precondition(pre.first, pre.second); - if (!landmark_graph->contains_simple_landmark(precondition)) { - preconditions[disj_class].push_back(precondition); - used_operators[disj_class].insert(i); - } - } - } - } - for (const auto &pre : preconditions) { - if (static_cast(used_operators[pre.first].size()) == num_ops) { - set pre_set; - pre_set.insert(pre.second.begin(), pre.second.end()); - if (pre_set.size() > 1) { // otherwise this LM is not actually a disjunctive LM - disjunctive_pre.push_back(pre_set); - } + extend_disjunction_class_lookups( + landmark_preconditions, op_id, + preconditions_by_disjunction_class, + used_operators_by_disjunction_class); } } + return get_disjunctive_preconditions( + preconditions_by_disjunction_class, used_operators_by_disjunction_class, + num_ops); } void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( @@ -524,16 +550,18 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( approximate_lookahead_orders(task_proxy, reached, node); // Process achieving operators again to find disjunctive LMs - vector> disjunctive_pre; - compute_disjunctive_preconditions( - task_proxy, disjunctive_pre, reached, landmark); - for (const auto &preconditions : disjunctive_pre) + vector> disjunctive_preconditions = + compute_disjunctive_preconditions( + task_proxy, landmark, reached); + for (const auto &preconditions : disjunctive_preconditions) // We don't want disjunctive LMs to get too big. - if (preconditions.size() < 5 && none_of(preconditions.begin() - , preconditions.end(), [&](const FactPair &atom) { - // TODO: Why not? - return initial_state[atom.var].get_value() == atom.value; - })) { + if (preconditions.size() < 5 && ranges::none_of( + preconditions.begin(), preconditions.end(), + [&](const FactPair &atom) { + // TODO: Why not? 
+ return initial_state[atom.var].get_value() == + atom.value; + })) { add_disjunctive_landmark_and_ordering( preconditions, *node, OrderingType::GREEDY_NECESSARY); } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index d3dc5753ee..9d49dca435 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -36,18 +36,18 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { LandmarkNode *node); void add_landmark_forward_orderings(); - void update_shared_preconditions( - const TaskProxy &task_proxy, const Landmark &landmark, - const OperatorProxy &op, - std::unordered_map &shared_preconditions) const; std::unordered_map compute_shared_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const std::vector> &reached) const; - void compute_disjunctive_preconditions( - const TaskProxy &task_proxy, - std::vector> &disjunctive_pre, - std::vector> &reached, + std::vector get_operators_achieving_landmark( const Landmark &landmark) const; + void extend_disjunction_class_lookups( + const std::unordered_map &landmark_preconditions, int op_id, + std::unordered_map> &preconditions_by_disjunction_class, + std::unordered_map> &used_operators_by_disjunction_class) const; + std::vector> compute_disjunctive_preconditions( + const TaskProxy &task_proxy, const Landmark &landmark, + const std::vector> &reached) const; virtual void generate_relaxed_landmarks( const std::shared_ptr &task, From e6dbeec50c0297c1050fc5371c9b0c78655bc9df Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 21 Mar 2025 15:57:20 +0100 Subject: [PATCH 061/108] Clean up relaxed landmark generation. 
--- .../landmarks/landmark_factory_rpg_exhaust.cc | 2 +- .../landmarks/landmark_factory_rpg_exhaust.h | 2 +- .../landmarks/landmark_factory_rpg_sasp.cc | 163 ++++++++++-------- .../landmarks/landmark_factory_rpg_sasp.h | 15 +- 4 files changed, 104 insertions(+), 78 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index 7e5ef498c1..d237ba2a76 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -42,7 +42,7 @@ static bool relaxed_task_solvable( } void LandmarkFactoryRpgExhaust::generate_goal_landmarks( - const TaskProxy &task_proxy) { + const TaskProxy &task_proxy) const { for (FactProxy goal : task_proxy.get_goals()) { Landmark landmark({goal.get_pair()}, false, false, true); landmark_graph->add_landmark(move(landmark)); diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.h b/src/search/landmarks/landmark_factory_rpg_exhaust.h index 2916c7c8ef..9a57f0423c 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.h +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.h @@ -6,7 +6,7 @@ namespace landmarks { class LandmarkFactoryRpgExhaust : public LandmarkFactoryRelaxation { const bool use_unary_relaxation; - void generate_goal_landmarks(const TaskProxy &task_proxy); + void generate_goal_landmarks(const TaskProxy &task_proxy) const; void generate_all_simple_landmarks( const TaskProxy &task_proxy, Exploration &exploration) const; virtual void generate_relaxed_landmarks( diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 2b5fa482fe..feff0e600e 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -245,6 +245,7 @@ void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( remove_occurrences_of_landmark_node(disjunctive_landmark_node); 
vector parents = get_natural_parents(disjunctive_landmark_node); + assert(use_orders || parents.empty()); landmark_graph->remove_node(disjunctive_landmark_node); /* Add incoming orderings of replaced `disjunctive_landmark_node` as natural orderings to `simple_node`. */ @@ -257,9 +258,11 @@ void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type) { if (landmark_graph->contains_simple_landmark(atom)) { - LandmarkNode &simple_landmark = - landmark_graph->get_simple_landmark_node(atom); - add_or_replace_ordering_if_stronger(simple_landmark, node, type); + if (use_orders) { + LandmarkNode &simple_landmark = + landmark_graph->get_simple_landmark_node(atom); + add_or_replace_ordering_if_stronger(simple_landmark, node, type); + } return; } @@ -267,7 +270,9 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( LandmarkNode &simple_landmark_node = landmark_graph->add_landmark(move (landmark)); open_landmarks.push_back(&simple_landmark_node); - add_or_replace_ordering_if_stronger(simple_landmark_node, node, type); + if (use_orders) { + add_or_replace_ordering_if_stronger(simple_landmark_node, node, type); + } if (landmark_graph->contains_disjunctive_landmark(atom)) { // Simple landmarks are more informative than disjunctive ones. 
remove_disjunctive_landmark_and_rewire_orderings(simple_landmark_node); @@ -290,7 +295,10 @@ bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { LandmarkNode &new_landmark_node = landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); - add_or_replace_ordering_if_stronger(new_landmark_node, node, type); + if (use_orders) { + add_or_replace_ordering_if_stronger( + new_landmark_node, node, type); + } } return true; } @@ -310,7 +318,9 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( LandmarkNode *new_landmark_node = &landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(new_landmark_node); - add_or_replace_ordering_if_stronger(*new_landmark_node, node, type); + if (use_orders) { + add_or_replace_ordering_if_stronger(*new_landmark_node, node, type); + } } } @@ -319,6 +329,7 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( unordered_map LandmarkFactoryRpgSasp::compute_shared_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const vector> &reached) const { + // TODO: Could this be a set of FactPair instead? unordered_map shared_preconditions; bool init = true; for (const FactPair &atom : landmark.atoms) { @@ -426,8 +437,8 @@ vector LandmarkFactoryRpgSasp::get_operators_achieving_landmark( void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( const unordered_map &landmark_preconditions, int op_id, - unordered_map> &preconditions_by_disjunction_class, - unordered_map> &used_operators_by_disjunction_class) const { + unordered_map> &preconditions, + unordered_map> &used_operators) const { for (const auto &[var, value] : landmark_preconditions) { int disjunction_class = disjunction_classes[var][value]; if (disjunction_class == -1) { @@ -440,8 +451,8 @@ void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( (which have been found already and are simple landmarks). 
*/ FactPair precondition(var, value); if (!landmark_graph->contains_simple_landmark(precondition)) { - preconditions_by_disjunction_class[disjunction_class].push_back(precondition); - used_operators_by_disjunction_class[disjunction_class].insert(op_id); + preconditions[disjunction_class].push_back(precondition); + used_operators[disjunction_class].insert(op_id); } } } @@ -495,25 +506,57 @@ vector> LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( } } return get_disjunctive_preconditions( - preconditions_by_disjunction_class, used_operators_by_disjunction_class, - num_ops); + preconditions_by_disjunction_class, + used_operators_by_disjunction_class, num_ops); } -void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( - const shared_ptr &task, Exploration &exploration) { - TaskProxy task_proxy(*task); - if (log.is_at_least_normal()) { - log << "Generating landmarks using the RPG/SAS+ approach" << endl; - } - build_dtg_successors(task_proxy); - build_disjunction_classes(task_proxy); - +void LandmarkFactoryRpgSasp::generate_goal_landmarks( + const TaskProxy &task_proxy) { for (FactProxy goal : task_proxy.get_goals()) { Landmark landmark({goal.get_pair()}, false, false, true); LandmarkNode &node = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&node); } +} +void LandmarkFactoryRpgSasp::generate_shared_precondition_landmarks( + const TaskProxy &task_proxy, const Landmark &landmark, + LandmarkNode *node, const vector> &reached) { + unordered_map shared_preconditions = + compute_shared_preconditions(task_proxy, landmark, reached); + /* All shared preconditions are landmarks, and greedy-necessary + predecessors of `landmark`. 
*/ + for (auto [var, value] : shared_preconditions) { + add_simple_landmark_and_ordering( + FactPair(var, value), *node, OrderingType::GREEDY_NECESSARY); + } +} + +void LandmarkFactoryRpgSasp::generate_disjunctive_precondition_landmarks( + const TaskProxy &task_proxy, const State &initial_state, + const Landmark &landmark, LandmarkNode *node, + const vector> &reached) { + vector> disjunctive_preconditions = + compute_disjunctive_preconditions(task_proxy, landmark, reached); + for (const auto &preconditions : disjunctive_preconditions) { + /* We don't want disjunctive landmarks to get too big. Also, + they should not hold in the initial state. */ + if (preconditions.size() < 5 && ranges::none_of( + preconditions.begin(), preconditions.end(), + [&](const FactPair &atom) { + /* TODO: Is there a good reason why not? We allow + simple landmarks to hold in the initial state. */ + return initial_state[atom.var].get_value() == + atom.value; + })) { + add_disjunctive_landmark_and_ordering( + preconditions, *node, OrderingType::GREEDY_NECESSARY); + } + } +} + +void LandmarkFactoryRpgSasp::generate_backchaining_landmarks( + const TaskProxy &task_proxy, Exploration &exploration) { State initial_state = task_proxy.get_initial_state(); while (!open_landmarks.empty()) { LandmarkNode *node = open_landmarks.front(); @@ -521,65 +564,38 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( open_landmarks.pop_front(); assert(forward_orders[node].empty()); - if (!landmark.is_true_in_state(initial_state)) { - /* - Backchain from *landmark* and compute greedy necessary - predecessors. - Firstly, collect which propositions can be reached without - achieving the landmark. - */ - vector> reached = - exploration.compute_relaxed_reachability(landmark.atoms, false); - /* - Use this information to determine all operators that can - possibly achieve *landmark* for the first time, and collect - any precondition propositions that all such operators share - (if there are any). 
- */ - unordered_map shared_preconditions = - compute_shared_preconditions(task_proxy, landmark, reached); - /* - All such shared preconditions are landmarks, and greedy - necessary predecessors of *landmark*. - */ - for (auto [var, value] : shared_preconditions) { - add_simple_landmark_and_ordering(FactPair(var, value), *node, - OrderingType::GREEDY_NECESSARY); - } - // Extract additional orders from the relaxed planning graph and DTG. - approximate_lookahead_orders(task_proxy, reached, node); - - // Process achieving operators again to find disjunctive LMs - vector> disjunctive_preconditions = - compute_disjunctive_preconditions( - task_proxy, landmark, reached); - for (const auto &preconditions : disjunctive_preconditions) - // We don't want disjunctive LMs to get too big. - if (preconditions.size() < 5 && ranges::none_of( - preconditions.begin(), preconditions.end(), - [&](const FactPair &atom) { - // TODO: Why not? - return initial_state[atom.var].get_value() == - atom.value; - })) { - add_disjunctive_landmark_and_ordering( - preconditions, *node, OrderingType::GREEDY_NECESSARY); - } + if (landmark.is_true_in_state(initial_state)) { + continue; } + vector> reached = + exploration.compute_relaxed_reachability(landmark.atoms, false); + generate_shared_precondition_landmarks( + task_proxy, landmark, node, reached); + approximate_lookahead_orders(task_proxy, reached, node); + generate_disjunctive_precondition_landmarks( + task_proxy, initial_state, landmark, node, reached); } - add_landmark_forward_orderings(); +} +void LandmarkFactoryRpgSasp::generate_relaxed_landmarks( + const shared_ptr &task, Exploration &exploration) { + TaskProxy task_proxy(*task); + if (log.is_at_least_normal()) { + log << "Generating landmarks using the RPG/SAS+ approach" << endl; + } + build_dtg_successors(task_proxy); + build_disjunction_classes(task_proxy); + generate_goal_landmarks(task_proxy); + generate_backchaining_landmarks(task_proxy, exploration); + if (use_orders) { + 
add_landmark_forward_orderings(); + } if (!disjunctive_landmarks) { discard_disjunctive_landmarks(); } - - /* TODO: Ensure that landmark orderings are not even added if - `use_orders` is false. */ - if (!use_orders) { - discard_all_orderings(); - } } +// Extract orderings from the relaxed planning and domain transition graph. void LandmarkFactoryRpgSasp::approximate_lookahead_orders( const TaskProxy &task_proxy, const vector> &reached, LandmarkNode *node) { @@ -624,9 +640,10 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orders( If that value is crucial for achieving the LM from the initial state, we have found a new landmark. */ - if (!domain_connectivity(initial_state, atom, exclude)) + if (!domain_connectivity(initial_state, atom, exclude)) { add_simple_landmark_and_ordering( FactPair(atom.var, value), *node, OrderingType::NATURAL); + } } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 9d49dca435..bc61adcb7c 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -14,7 +14,6 @@ namespace landmarks { class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const bool disjunctive_landmarks; const bool use_orders; - // TODO: Use std::deque instead? 
std::deque open_landmarks; std::vector> disjunction_classes; @@ -43,12 +42,22 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const Landmark &landmark) const; void extend_disjunction_class_lookups( const std::unordered_map &landmark_preconditions, int op_id, - std::unordered_map> &preconditions_by_disjunction_class, - std::unordered_map> &used_operators_by_disjunction_class) const; + std::unordered_map> &preconditions, + std::unordered_map> &used_operators) const; std::vector> compute_disjunctive_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const std::vector> &reached) const; + void generate_goal_landmarks(const TaskProxy &task_proxy); + void generate_shared_precondition_landmarks( + const TaskProxy &task_proxy, const Landmark &landmark, + LandmarkNode *node, const std::vector> &reached); + void generate_disjunctive_precondition_landmarks( + const TaskProxy &task_proxy, const State &initial_state, + const Landmark &landmark, LandmarkNode *node, + const std::vector> &reached); + void generate_backchaining_landmarks( + const TaskProxy &task_proxy, Exploration &exploration); virtual void generate_relaxed_landmarks( const std::shared_ptr &task, Exploration &exploration) override; From c03889a5269cae7264b9bc969b947de9fcfd1d6c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 24 Mar 2025 14:56:02 +0100 Subject: [PATCH 062/108] Clean up approximation of lookahead orderings. 
--- .../landmarks/landmark_factory_rpg_sasp.cc | 237 +++++++++---------
 .../landmarks/landmark_factory_rpg_sasp.h | 20 +-
 2 files changed, 125 insertions(+), 132 deletions(-)

diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc
index feff0e600e..448a8c6e61 100644
--- a/src/search/landmarks/landmark_factory_rpg_sasp.cc
+++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc
@@ -209,7 +209,7 @@ void LandmarkFactoryRpgSasp::remove_occurrences_of_landmark_node(
     if (it != open_landmarks.end()) {
         open_landmarks.erase(it);
     }
-    forward_orders.erase(node);
+    forward_orderings.erase(node);
 }
 
 static vector get_natural_parents(const LandmarkNode *node) {
@@ -562,7 +562,7 @@ void LandmarkFactoryRpgSasp::generate_backchaining_landmarks(
         LandmarkNode *node = open_landmarks.front();
         Landmark &landmark = node->get_landmark();
         open_landmarks.pop_front();
-        assert(forward_orders[node].empty());
+        assert(forward_orderings[node].empty());
 
         if (landmark.is_true_in_state(initial_state)) {
             continue;
@@ -571,7 +571,7 @@ void LandmarkFactoryRpgSasp::generate_backchaining_landmarks(
             exploration.compute_relaxed_reachability(landmark.atoms, false);
         generate_shared_precondition_landmarks(
             task_proxy, landmark, node, reached);
-        approximate_lookahead_orders(task_proxy, reached, node);
+        approximate_lookahead_orderings(task_proxy, reached, node);
         generate_disjunctive_precondition_landmarks(
             task_proxy, initial_state, landmark, node, reached);
     }
@@ -595,144 +595,137 @@ void LandmarkFactoryRpgSasp::generate_relaxed_landmarks(
     }
 }
 
+/*
+  Tests whether in the domain transition graph represented by `successors`,
+  there is a path from `init_value` to `landmark_value`, without passing through
+  `excluded_value` or any unreachable value according to the relaxed planning
+  graph. If this is not possible, that means `excluded_value` is crucial to
+  achieve `landmark_value`. 
+*/ +static bool value_critical_to_reach_landmark( + int init_value, int landmark_value, int excluded_value, + const vector &reached, const vector> &successors) { + assert(landmark_value != init_value); + assert(reached[landmark_value]); + if (excluded_value == init_value) { + return true; + } + deque open; + unordered_set closed(reached.size()); + open.push_back(init_value); + closed.insert(init_value); + while (!open.empty()) { + int value = open.front(); + open.pop_front(); + for (int succ : successors[value]) { + if (!reached[succ]) { + /* Values unreached in the delete relaxation cannot be landmarks + for `landmark_value` even if they are reachable in the DTG. */ + continue; + } + if (succ == landmark_value) { + return false; + } + if (!closed.contains(succ)) { + open.push_back(succ); + closed.insert(succ); + } + } + } + return true; +} + +static vector get_critical_dtg_predecessors( + int init_value, int landmark_value, const vector &reached, + const vector> &successors) { + assert(reached[landmark_value]); + int domain_size = static_cast(reached.size()); + vector critical; + critical.reserve(domain_size); + for (int value = 0; value < domain_size; ++value) { + if (value != landmark_value && reached[value] && + value_critical_to_reach_landmark(init_value, landmark_value, + value, reached, successors)) { + critical.push_back(value); + } + } + return critical; +} + // Extract orderings from the relaxed planning and domain transition graph. -void LandmarkFactoryRpgSasp::approximate_lookahead_orders( +void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( const TaskProxy &task_proxy, const vector> &reached, LandmarkNode *node) { - /* - Find all var-val pairs that can only be reached after the landmark - (according to relaxed plan graph as captured in reached). 
- The result is saved in the node member variable forward_orders, and - will be used later, when the phase of finding LMs has ended (because - at the moment we don't know which of these var-val pairs will be LMs). - */ - VariablesProxy variables = task_proxy.get_variables(); - find_forward_orders(variables, reached, node); - - /* Use domain transition graphs to find further orders. Only possible - if landmark is simple. */ + const VariablesProxy &variables = task_proxy.get_variables(); const Landmark &landmark = node->get_landmark(); - if (landmark.is_disjunctive) - return; - const FactPair &atom = landmark.atoms[0]; - - /* - Collect in *unreached* all values of the LM variable that cannot be - reached before the LM value (in the relaxed plan graph). - */ - int domain_size = variables[atom.var].get_domain_size(); - unordered_set unreached(domain_size); - for (int value = 0; value < domain_size; ++value) - if (!reached[atom.var][value] && atom.value != value) - unreached.insert(value); - /* - The set *exclude* will contain all those values of the LM variable that - cannot be reached before the LM value (as in *unreached*) PLUS - one value that CAN be reached. - */ - State initial_state = task_proxy.get_initial_state(); - for (int value = 0; value < domain_size; ++value) - if (unreached.find(value) == unreached.end() && atom.value != value) { - unordered_set exclude(domain_size); - exclude = unreached; - exclude.insert(value); - /* - If that value is crucial for achieving the LM from the - initial state, we have found a new landmark. 
- */ - if (!domain_connectivity(initial_state, atom, exclude)) { - add_simple_landmark_and_ordering( - FactPair(atom.var, value), *node, OrderingType::NATURAL); - } + forward_orderings[node] = compute_atoms_unreachable_without_landmark( + variables, landmark, reached); + if (!landmark.is_disjunctive && !landmark.is_conjunctive) { + assert(landmark.atoms.size() == 1); + const FactPair landmark_atom = landmark.atoms[0]; + const FactPair init_atom = + task_proxy.get_initial_state()[landmark_atom.var].get_pair(); + vector critical_predecessors = get_critical_dtg_predecessors( + landmark_atom.value, init_atom.value, + reached[landmark_atom.var], dtg_successors[landmark_atom.var]); + for (int value : critical_predecessors) { + add_simple_landmark_and_ordering(FactPair(landmark_atom.var, value), + *node, OrderingType::NATURAL); } + } } -bool LandmarkFactoryRpgSasp::domain_connectivity( - const State &initial_state, const FactPair &landmark, - const unordered_set &exclude) { - /* - Tests whether in the domain transition graph of the LM variable, there is - a path from the initial state value to the LM value, without passing through - any value in "exclude". If not, that means that one of the values in "exclude" - is crucial for achieving the landmark (i.e. is on every path to the LM). 
- */ - int var = landmark.var; - assert(landmark.value != initial_state[var].get_value()); // no initial state landmarks - // The value that we want to achieve must not be excluded: - assert(exclude.find(landmark.value) == exclude.end()); - // If the value in the initial state is excluded, we won't achieve our goal value: - if (exclude.find(initial_state[var].get_value()) != exclude.end()) - return false; - deque open; - unordered_set closed(initial_state[var].get_variable().get_domain_size()); - closed = exclude; - open.push_back(initial_state[var].get_value()); - closed.insert(initial_state[var].get_value()); - const vector> &successors = dtg_successors[var]; - while (closed.find(landmark.value) == closed.end()) { - if (open.empty()) // landmark not in closed and nothing more to insert - return false; - const int c = open.front(); - open.pop_front(); - for (int val : successors[c]) { - if (closed.find(val) == closed.end()) { - open.push_back(val); - closed.insert(val); +bool LandmarkFactoryRpgSasp::atom_and_landmark_achievable_together( + const FactPair &atom, const Landmark &landmark) const { + assert(!landmark.is_conjunctive); + for (const FactPair &landmark_atom : landmark.atoms) { + if (atom == landmark_atom) { + return true; + } + + /* Make sure there is no operator that reaches both `atom` and + `landmark_atom` at the same time. 
*/ + const vector &atom_achievers = + get_operators_including_effect(atom); + const vector &landmark_achievers = + get_operators_including_effect(landmark_atom); + for (int atom_achiever_id : atom_achievers) { + for (int landmark_achiever_id : landmark_achievers) { + if (atom_achiever_id == landmark_achiever_id) { + return true; + } } } } - return true; + return false; } -void LandmarkFactoryRpgSasp::find_forward_orders( - const VariablesProxy &variables, const vector> &reached, - LandmarkNode *node) { - /* - The landmark of `node` is ordered before any atom that cannot be reached - before the landmark of `node` according to relaxed planning graph (as - captured in `reached`). These orderings are saved in the `forward_orders` - and added to the landmark graph in `add_landmark_forward_orderings`. - */ +/* + The landmark of `node` is ordered before any atom that cannot be reached + before the landmark of `node` according to relaxed planning graph (as captured + in `reached`). These orderings are saved in `forward_orderings` and added to + the landmark graph in `add_landmark_forward_orderings` when it is known which + atoms are actually landmarks. +*/ +utils::HashSet LandmarkFactoryRpgSasp::compute_atoms_unreachable_without_landmark( + const VariablesProxy &variables, const Landmark &landmark, + const vector> &reached) const { + utils::HashSet unreachable_atoms; for (VariableProxy var : variables) { for (int value = 0; value < var.get_domain_size(); ++value) { - if (reached[var.get_id()][value]) - continue; - const FactPair atom(var.get_id(), value); - - bool insert = true; - for (const FactPair &landmark_atom : node->get_landmark().atoms) { - if (atom != landmark_atom) { - /* Make sure there is no operator that reaches both `atom` - and (var, value) at the same time. 
*/ - bool intersection_empty = true; - const vector &atom_achievers = - get_operators_including_effect(atom); - const vector &landmark_achievers = - get_operators_including_effect(landmark_atom); - for (size_t j = 0; j < atom_achievers.size() && intersection_empty; ++j) - for (size_t k = 0; k < landmark_achievers.size() - && intersection_empty; ++k) - if (atom_achievers[j] == landmark_achievers[k]) - intersection_empty = false; - - if (!intersection_empty) { - insert = false; - break; - } - } else { - insert = false; - break; - } + FactPair atom(var.get_id(), value); + if (!reached[atom.var][atom.value] && + !atom_and_landmark_achievable_together(atom, landmark)) { + unreachable_atoms.insert(atom); } - if (insert) - forward_orders[node].insert(atom); } } + return unreachable_atoms; } void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { for (const auto &node : *landmark_graph) { - for (const auto &node2_pair : forward_orders[node.get()]) { + for (const auto &node2_pair : forward_orderings[node.get()]) { if (landmark_graph->contains_simple_landmark(node2_pair)) { LandmarkNode &node2 = landmark_graph->get_simple_landmark_node(node2_pair); @@ -740,11 +733,11 @@ void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { *node, node2, OrderingType::NATURAL); } } - forward_orders[node.get()].clear(); + forward_orderings[node.get()].clear(); } } -void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() { +void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() const { /* Using disjunctive landmarks during landmark generation can be beneficial even if we don't want to use disjunctive landmarks during search. 
So we diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index bc61adcb7c..7c9dbf72b6 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -17,8 +17,9 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { std::deque open_landmarks; std::vector> disjunction_classes; - std::unordered_map> forward_orders; + std::unordered_map> forward_orderings; + // TODO: Maybe introduce a class or struct to represent domain transition graph(s). /* The entry `dtg_successors[var][val]` contains all successor values of the atom var->val in the domain transition graph (aka atomic projection). */ std::vector>> dtg_successors; @@ -30,9 +31,12 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const std::unordered_map &effect_conditions); void build_dtg_successors(const TaskProxy &task_proxy); void add_dtg_successor(int var_id, int pre, int post); - void find_forward_orders(const VariablesProxy &variables, - const std::vector> &reached, - LandmarkNode *node); + bool atom_and_landmark_achievable_together( + const FactPair &atom, const Landmark &landmark) const; + utils::HashSet compute_atoms_unreachable_without_landmark( + const VariablesProxy &variables, const Landmark &landmark, + const std::vector> &reached) const; + void add_landmark_forward_orderings(); std::unordered_map compute_shared_preconditions( @@ -72,17 +76,13 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { OrderingType type) const; void add_disjunctive_landmark_and_ordering( const std::set &atoms, LandmarkNode &node, OrderingType type); - void approximate_lookahead_orders( + void approximate_lookahead_orderings( const TaskProxy &task_proxy, const std::vector> &reached, LandmarkNode *node); - // TODO: Rename this function. 
- bool domain_connectivity(const State &initial_state, - const FactPair &landmark, - const std::unordered_set &exclude); void build_disjunction_classes(const TaskProxy &task_proxy); - void discard_disjunctive_landmarks(); + void discard_disjunctive_landmarks() const; public: LandmarkFactoryRpgSasp(bool disjunctive_landmarks, bool use_orders, utils::Verbosity verbosity); From e54387e49e4a87f326352377ed6d98d13c102530 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Mon, 24 Mar 2025 17:19:53 +0100 Subject: [PATCH 063/108] Replace data structures. --- .../landmark_factory_reasonable_orders_hps.cc | 12 ---- .../landmarks/landmark_factory_rpg_sasp.cc | 62 ++++++++----------- .../landmarks/landmark_factory_rpg_sasp.h | 4 +- src/search/landmarks/util.cc | 12 ++++ src/search/landmarks/util.h | 6 ++ 5 files changed, 47 insertions(+), 49 deletions(-) diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index a5c1c5bb81..6701600ea3 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -276,18 +276,6 @@ static utils::HashSet get_effects_on_other_variables( return next_effect; } -static utils::HashSet get_intersection( - const utils::HashSet &set1, - const utils::HashSet &set2) { - utils::HashSet intersection; - for (const FactPair &atom : set1) { - if (set2.contains(atom)) { - intersection.insert(atom); - } - } - return intersection; -} - utils::HashSet LandmarkFactoryReasonableOrdersHPS::get_shared_effects_of_achievers( const FactPair &atom, const TaskProxy &task_proxy) const { utils::HashSet shared_effects; diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 448a8c6e61..27c6f33c78 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -21,19 +21,6 @@ using 
namespace std; using utils::ExitCode; namespace landmarks { -/* TODO: Can we combine this with the intersection defined in reasonable - order factory? */ -static unordered_map get_intersection( - const unordered_map &map1, const unordered_map &map2) { - unordered_map intersection; - for (auto [key, value] : map1) { - if (map2.contains(key) && map2.at(key) == value) { - intersection[key] = value; - } - } - return intersection; -} - LandmarkFactoryRpgSasp::LandmarkFactoryRpgSasp( bool disjunctive_landmarks, bool use_orders, utils::Verbosity verbosity) : LandmarkFactoryRelaxation(verbosity), @@ -107,10 +94,10 @@ void LandmarkFactoryRpgSasp::add_dtg_successor(int var_id, int pre, int post) { // Returns the set of variables occurring in the precondition. static unordered_set add_preconditions( - const OperatorProxy &op, unordered_map &result) { + const OperatorProxy &op, utils::HashSet &result) { unordered_set precondition_variables; for (FactProxy precondition : op.get_preconditions()) { - result[precondition.get_variable().get_id()] = precondition.get_value(); + result.insert(precondition.get_pair()); precondition_variables.insert(precondition.get_variable().get_id()); } return precondition_variables; @@ -126,7 +113,7 @@ static void add_binary_variable_conditions( const TaskProxy &task_proxy, const Landmark &landmark, const EffectsProxy &effects, const unordered_set &precondition_variables, - unordered_map &result) { + utils::HashSet &result) { State initial_state = task_proxy.get_initial_state(); for (EffectProxy effect : effects) { FactProxy effect_atom = effect.get_fact(); @@ -136,7 +123,11 @@ static void add_binary_variable_conditions( for (const FactPair &atom : landmark.atoms) { if (atom.var == var_id && initial_state[var_id].get_value() != atom.value) { - result[var_id] = initial_state[var_id].get_value(); + assert(ranges::none_of(result.begin(), result.end(), + [&](const FactPair &result_atom) { + return result_atom.var == var_id; + })); + 
result.insert(initial_state[var_id].get_pair()); break; } } @@ -146,9 +137,9 @@ static void add_binary_variable_conditions( static void add_effect_conditions( const Landmark &landmark, const EffectsProxy &effects, - unordered_map &result) { + utils::HashSet &result) { // Intersect effect conditions of all effects that can achieve `landmark`. - unordered_map intersection; + utils::HashSet intersection; bool init = true; for (const EffectProxy &effect : effects) { const FactPair &effect_atom = effect.get_fact().get_pair(); @@ -161,9 +152,9 @@ static void add_effect_conditions( return; } - unordered_map effect_condition; + utils::HashSet effect_condition; for (FactProxy atom : effect.get_conditions()) { - effect_condition[atom.get_variable().get_id()] = atom.get_value(); + effect_condition.insert(atom.get_pair()); } if (init) { swap(intersection, effect_condition); @@ -189,10 +180,10 @@ static void add_effect_conditions( (3) shared effect conditions of all conditional effects that achieve the landmark. */ -static unordered_map approximate_preconditions_to_achieve_landmark( +static utils::HashSet approximate_preconditions_to_achieve_landmark( const TaskProxy &task_proxy, const Landmark &landmark, const OperatorProxy &op) { - unordered_map result; + utils::HashSet result; unordered_set precondition_variables = add_preconditions(op, result); EffectsProxy effects = op.get_effects(); add_binary_variable_conditions( @@ -326,11 +317,11 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( /* Compute the shared preconditions of all operators that can potentially achieve `landmark`, given the reachability in the relaxed planning graph. */ -unordered_map LandmarkFactoryRpgSasp::compute_shared_preconditions( +utils::HashSet LandmarkFactoryRpgSasp::compute_shared_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const vector> &reached) const { // TODO: Could this be a set of FactPair instead? 
- unordered_map shared_preconditions; + utils::HashSet shared_preconditions; bool init = true; for (const FactPair &atom : landmark.atoms) { const vector &op_ids = get_operators_including_effect(atom); @@ -338,7 +329,7 @@ unordered_map LandmarkFactoryRpgSasp::compute_shared_preconditions( OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); if (possibly_reaches_landmark(op, reached, landmark)) { - unordered_map preconditions = + utils::HashSet preconditions = approximate_preconditions_to_achieve_landmark( task_proxy, landmark, op); if (init) { @@ -418,7 +409,8 @@ void LandmarkFactoryRpgSasp::build_disjunction_classes( /* Insert predicate into unordered_map or extract value that is already there. */ pair entry(predicate, predicate_to_index.size()); - disjunction_class = predicate_to_index.insert(entry).first->second; + disjunction_class = + predicate_to_index.insert(entry).first->second; } disjunction_classes[var.get_id()].push_back(disjunction_class); } @@ -436,10 +428,10 @@ vector LandmarkFactoryRpgSasp::get_operators_achieving_landmark( } void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( - const unordered_map &landmark_preconditions, int op_id, - unordered_map> &preconditions, - unordered_map> &used_operators) const { - for (const auto &[var, value] : landmark_preconditions) { + const utils::HashSet &landmark_preconditions, int op_id, + unordered_map> &preconditions, + unordered_map> &used_operators) const { + for (auto [var, value] : landmark_preconditions) { int disjunction_class = disjunction_classes[var][value]; if (disjunction_class == -1) { /* This atom may not participate in any disjunctive @@ -496,7 +488,7 @@ vector> LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( get_operator_or_axiom(task_proxy, op_id); if (possibly_reaches_landmark(op, reached, landmark)) { ++num_ops; - unordered_map landmark_preconditions = + utils::HashSet landmark_preconditions = approximate_preconditions_to_achieve_landmark( task_proxy, 
landmark, op); extend_disjunction_class_lookups( @@ -522,13 +514,13 @@ void LandmarkFactoryRpgSasp::generate_goal_landmarks( void LandmarkFactoryRpgSasp::generate_shared_precondition_landmarks( const TaskProxy &task_proxy, const Landmark &landmark, LandmarkNode *node, const vector> &reached) { - unordered_map shared_preconditions = + utils::HashSet shared_preconditions = compute_shared_preconditions(task_proxy, landmark, reached); /* All shared preconditions are landmarks, and greedy-necessary predecessors of `landmark`. */ - for (auto [var, value] : shared_preconditions) { + for (const FactPair &atom : shared_preconditions) { add_simple_landmark_and_ordering( - FactPair(var, value), *node, OrderingType::GREEDY_NECESSARY); + atom, *node, OrderingType::GREEDY_NECESSARY); } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 7c9dbf72b6..534f9178dd 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -39,13 +39,13 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { void add_landmark_forward_orderings(); - std::unordered_map compute_shared_preconditions( + utils::HashSet compute_shared_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const std::vector> &reached) const; std::vector get_operators_achieving_landmark( const Landmark &landmark) const; void extend_disjunction_class_lookups( - const std::unordered_map &landmark_preconditions, int op_id, + const utils::HashSet &landmark_preconditions, int op_id, std::unordered_map> &preconditions, std::unordered_map> &used_operators) const; std::vector> compute_disjunctive_preconditions( diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 6b6b2c4017..3dd24e8172 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -38,6 +38,18 @@ bool possibly_reaches_landmark(const OperatorProxy &op, }); } 
+utils::HashSet get_intersection( + const utils::HashSet &set1, + const utils::HashSet &set2) { + utils::HashSet intersection; + for (const FactPair &atom : set1) { + if (set2.contains(atom)) { + intersection.insert(atom); + } + } + return intersection; +} + OperatorProxy get_operator_or_axiom(const TaskProxy &task_proxy, int op_or_axiom_id) { if (op_or_axiom_id < 0) { diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index b8b70068b5..928976cd91 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -1,8 +1,11 @@ #ifndef LANDMARKS_UTIL_H #define LANDMARKS_UTIL_H +#include "../utils/hash.h" + #include +struct FactPair; class OperatorProxy; class TaskProxy; @@ -19,6 +22,9 @@ extern bool possibly_reaches_landmark( const OperatorProxy &op, const std::vector> &reached, const Landmark &landmark); +extern utils::HashSet get_intersection( + const utils::HashSet &set1, const utils::HashSet &set2); + extern OperatorProxy get_operator_or_axiom( const TaskProxy &task_proxy, int op_or_axiom_id); extern int get_operator_or_axiom_id(const OperatorProxy &op); From 3eb6c8f0d96aa33a0e50bda2efc1a85c219f0b93 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 25 Mar 2025 17:07:00 +0100 Subject: [PATCH 064/108] Break apart landmark extraction. 
--- .../landmarks/landmark_factory_zhu_givan.cc | 108 ++++++++++-------- .../landmarks/landmark_factory_zhu_givan.h | 86 +++++++------- 2 files changed, 107 insertions(+), 87 deletions(-) diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 3db491d3fb..cdaec2ff19 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -29,9 +29,8 @@ void LandmarkFactoryZhuGivan::generate_relaxed_landmarks( } compute_triggers(task_proxy); - - PropositionLayer last_prop_layer = build_relaxed_plan_graph_with_labels(task_proxy); - + PropositionLayer last_prop_layer = + build_relaxed_plan_graph_with_labels(task_proxy); extract_landmarks(task_proxy, last_prop_layer); /* TODO: Ensure that landmark orderings are not even added if @@ -41,66 +40,81 @@ void LandmarkFactoryZhuGivan::generate_relaxed_landmarks( } } -void LandmarkFactoryZhuGivan::extract_landmarks( - const TaskProxy &task_proxy, const PropositionLayer &last_prop_layer) { - /* - We first check if at least one of the goal facts is relaxed unreachable. - In this case we create a graph with just this fact as landmark. Since - the landmark will have no achievers, the heuristic can detect the - initial state as a dead-end. - */ +/* + Check if any goal atom is unreachable in the delete relaxation. If so, we + create a graph with just this atom as landmark and empty achievers to signal + to the heuristic that the initial state as a dead-end. +*/ +bool LandmarkFactoryZhuGivan::goal_is_reachable( + const TaskProxy &task_proxy, const PropositionLayer &prop_layer) const { for (FactProxy goal : task_proxy.get_goals()) { - if (!last_prop_layer[goal.get_variable().get_id()][goal.get_value()].reached()) { + if (!prop_layer[goal.get_variable().get_id()][goal.get_value()].reached()) { if (log.is_at_least_normal()) { log << "Problem not solvable, even if relaxed." 
<< endl; } Landmark landmark({goal.get_pair()}, false, false, true); landmark_graph->add_landmark(move(landmark)); - return; + return false; } } + return true; +} + +LandmarkNode *LandmarkFactoryZhuGivan::create_goal_landmark( + const FactPair &goal) const { + LandmarkNode *node; + if (landmark_graph->contains_simple_landmark(goal)) { + node = &landmark_graph->get_simple_landmark_node(goal); + node->get_landmark().is_true_in_goal = true; + } else { + Landmark landmark({goal}, false, false, true); + node = &landmark_graph->add_landmark(move(landmark)); + } + return node; +} - State initial_state = task_proxy.get_initial_state(); - // insert goal landmarks and mark them as goals - for (FactProxy goal : task_proxy.get_goals()) { - FactPair goal_landmark = goal.get_pair(); - // TODO: rename `lm_node` (avoid lm). - LandmarkNode *lm_node; - if (landmark_graph->contains_simple_landmark(goal_landmark)) { - lm_node = &landmark_graph->get_simple_landmark_node(goal_landmark); - lm_node->get_landmark().is_true_in_goal = true; +void LandmarkFactoryZhuGivan::extract_landmarks_and_orderings_from_goal_labels( + const FactPair &goal, const PropositionLayer &prop_layer, + LandmarkNode *goal_landmark_node) const { + const PlanGraphNode &goal_node = prop_layer[goal.var][goal.value]; + assert(goal_node.reached()); + + for (const FactPair &atom : goal_node.labels) { + if (atom == goal) { + // Ignore label on itself. 
+ continue; + } + + LandmarkNode *node; + if (landmark_graph->contains_simple_landmark(atom)) { + node = &landmark_graph->get_simple_landmark_node(atom); } else { - Landmark landmark({goal_landmark}, false, false, true); - lm_node = &landmark_graph->add_landmark(move(landmark)); + Landmark landmark({atom}, false, false); + node = &landmark_graph->add_landmark(move(landmark)); } - // extract landmarks from goal labels - const plan_graph_node &goal_node = - last_prop_layer[goal_landmark.var][goal_landmark.value]; - - assert(goal_node.reached()); - - // TODO: get rid of `lm` (avoid lm). - for (const FactPair &lm : goal_node.labels) { - if (lm == goal_landmark) // ignore label on itself - continue; - LandmarkNode *node; - // Add new landmarks - if (!landmark_graph->contains_simple_landmark(lm)) { - Landmark landmark({lm}, false, false); - node = &landmark_graph->add_landmark(move(landmark)); - } else { - node = &landmark_graph->get_simple_landmark_node(lm); - } - // TODO: Update comment below after renaming. 
- // Add order: lm ->_{nat} lm - assert(node->parents.find(lm_node) == node->parents.end()); - assert(lm_node->children.find(node) == lm_node->children.end()); + if (use_orders) { + assert(!node->parents.contains(goal_landmark_node)); + assert(!goal_landmark_node->children.contains(node)); add_or_replace_ordering_if_stronger( - *node, *lm_node, OrderingType::NATURAL); + *node, *goal_landmark_node, OrderingType::NATURAL); } } } +void LandmarkFactoryZhuGivan::extract_landmarks( + const TaskProxy &task_proxy, + const PropositionLayer &last_prop_layer) const { + if (!goal_is_reachable(task_proxy, last_prop_layer)) { + return; + } + for (FactProxy goal : task_proxy.get_goals()) { + FactPair goal_atom = goal.get_pair(); + LandmarkNode *node = create_goal_landmark(goal_atom); + extract_landmarks_and_orderings_from_goal_labels( + goal_atom, last_prop_layer, node); + } +} + LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::build_relaxed_plan_graph_with_labels( const TaskProxy &task_proxy) const { assert(!triggers.empty()); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.h b/src/search/landmarks/landmark_factory_zhu_givan.h index e58b83b17b..4f83ccd4de 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.h +++ b/src/search/landmarks/landmark_factory_zhu_givan.h @@ -6,68 +6,74 @@ #include "../utils/hash.h" #include -#include #include namespace landmarks { using LandmarkSet = utils::HashSet; class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { - class plan_graph_node { -public: + struct PlanGraphNode { LandmarkSet labels; - inline bool reached() const { - // NOTE: nodes are always labeled with itself, - // if they have been reached + bool reached() const { + // NOTE: Reached nodes are always labeled with at least themselves. 
return !labels.empty(); } }; - using PropositionLayer = std::vector>; + using PropositionLayer = std::vector>; const bool use_orders; - // triggers[i][j] is a list of operators that could reach/change - // labels on some proposition, after proposition (i,j) has changed + /* The entry `triggers[i][j]` is a list of operators that could reach/change + labels on some proposition, after proposition (i,j) has changed. */ std::vector>> triggers; void compute_triggers(const TaskProxy &task_proxy); - // Note: must include operators that only have conditional effects + // Note: must include operators that only have conditional effects. std::vector operators_without_preconditions; - bool operator_applicable(const OperatorProxy &op, const PropositionLayer &state) const; - - bool operator_cond_effect_fires(const EffectConditionsProxy &effect_conditions, - const PropositionLayer &layer) const; - - // Apply operator and propagate labels to next layer. Returns set of - // propositions that: - // (a) have just been reached OR (b) had their labels changed in next - // proposition layer - LandmarkSet apply_operator_and_propagate_labels(const OperatorProxy &op, - const PropositionLayer ¤t, PropositionLayer &next) const; - - // Calculate the union of precondition labels of op, using the - // labels from current - LandmarkSet union_of_precondition_labels(const OperatorProxy &op, - const PropositionLayer ¤t) const; - - // Calculate the union of precondition labels of a conditional effect, - // using the labels from current - LandmarkSet union_of_condition_labels(const EffectConditionsProxy &effect_conditions, - const PropositionLayer ¤t) const; - - // Relaxed exploration, returns the last proposition layer - // (the fixpoint) with labels - PropositionLayer build_relaxed_plan_graph_with_labels(const TaskProxy &task_proxy) const; - - // Extract landmarks from last proposition layer and add them to the - // landmarks graph + bool operator_applicable( + const OperatorProxy &op, const 
PropositionLayer &state) const; + + bool operator_cond_effect_fires( + const EffectConditionsProxy &effect_conditions, + const PropositionLayer &layer) const; + + /* Returns a set of propositions that: (a) have just been reached or (b) had + their labels changed in next proposition layer. */ + LandmarkSet apply_operator_and_propagate_labels( + const OperatorProxy &op, const PropositionLayer ¤t, + PropositionLayer &next) const; + + /* Calculate the union of precondition labels of `op`, using the + labels from the current layer. */ + LandmarkSet union_of_precondition_labels( + const OperatorProxy &op, const PropositionLayer ¤t) const; + + /* Calculate the union of precondition labels of a conditional effect + using the labels from the current layer. */ + LandmarkSet union_of_condition_labels( + const EffectConditionsProxy &effect_conditions, + const PropositionLayer ¤t) const; + + /* Relaxed exploration, returns the last proposition layer + (the fixpoint) with labels. */ + PropositionLayer build_relaxed_plan_graph_with_labels( + const TaskProxy &task_proxy) const; + + bool goal_is_reachable(const TaskProxy &task_proxy, + const PropositionLayer &prop_layer) const; + LandmarkNode *create_goal_landmark(const FactPair &goal) const; + void extract_landmarks_and_orderings_from_goal_labels( + const FactPair &goal, const PropositionLayer &prop_layer, + LandmarkNode *goal_landmark_node) const; + /* Construct a landmark graph using the landmarks on the given + proposition layer. */ void extract_landmarks(const TaskProxy &task_proxy, - const PropositionLayer &last_prop_layer); + const PropositionLayer &last_prop_layer) const; - // Link operators to its propositions in trigger list. + // Link an operators to its propositions in the trigger list. 
void add_operator_to_triggers(const OperatorProxy &op); virtual void generate_relaxed_landmarks( From 171ad0fa9d2f7a59662f7d698916968f91fa45ac Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 26 Mar 2025 10:02:55 +0100 Subject: [PATCH 065/108] Finish refactoring lm_zg. --- .../landmarks/landmark_factory_zhu_givan.cc | 267 +++++++++--------- .../landmarks/landmark_factory_zhu_givan.h | 27 +- src/search/landmarks/util.cc | 5 + src/search/landmarks/util.h | 2 + 4 files changed, 147 insertions(+), 154 deletions(-) diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index cdaec2ff19..1b774b50e9 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -29,15 +29,9 @@ void LandmarkFactoryZhuGivan::generate_relaxed_landmarks( } compute_triggers(task_proxy); - PropositionLayer last_prop_layer = + PropositionLayer last_layer = build_relaxed_plan_graph_with_labels(task_proxy); - extract_landmarks(task_proxy, last_prop_layer); - - /* TODO: Ensure that landmark orderings are not even added if - `use_orders` is false. 
*/ - if (!use_orders) { - discard_all_orderings(); - } + extract_landmarks(task_proxy, last_layer); } /* @@ -115,182 +109,177 @@ void LandmarkFactoryZhuGivan::extract_landmarks( } } -LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::build_relaxed_plan_graph_with_labels( - const TaskProxy &task_proxy) const { - assert(!triggers.empty()); - - PropositionLayer current_prop_layer; - unordered_set triggered(task_proxy.get_operators().size() + task_proxy.get_axioms().size()); - - // set initial layer - State initial_state = task_proxy.get_initial_state(); - VariablesProxy variables = task_proxy.get_variables(); - current_prop_layer.resize(variables.size()); +LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::initialize_relaxed_plan_graph( + const TaskProxy &task_proxy, unordered_set &triggered_ops) const { + const State &initial_state = task_proxy.get_initial_state(); + const VariablesProxy &variables = task_proxy.get_variables(); + PropositionLayer initial_layer; + initial_layer.resize(variables.size()); for (VariableProxy var : variables) { int var_id = var.get_id(); - current_prop_layer[var_id].resize(var.get_domain_size()); + initial_layer[var_id].resize(var.get_domain_size()); // label nodes from initial state int value = initial_state[var].get_value(); - current_prop_layer[var_id][value].labels.emplace(var_id, value); + initial_layer[var_id][value].labels.emplace(var_id, value); - triggered.insert(triggers[var_id][value].begin(), triggers[var_id][value].end()); + triggered_ops.insert( + triggers[var_id][value].begin(), triggers[var_id][value].end()); } - // Operators without preconditions do not propagate labels. So if they have - // no conditional effects, is only necessary to apply them once. (If they - // have conditional effects, they will be triggered at later stages again). 
- triggered.insert(operators_without_preconditions.begin(), - operators_without_preconditions.end()); + return initial_layer; +} +void LandmarkFactoryZhuGivan::propagate_labels_until_fixed_point_reached( + const TaskProxy &task_proxy, unordered_set &&triggered_ops, + PropositionLayer ¤t_layer) const { bool changes = true; while (changes) { - PropositionLayer next_prop_layer(current_prop_layer); - unordered_set next_triggered; + PropositionLayer next_layer(current_layer); + unordered_set next_triggers; changes = false; - for (int op_or_axiom_id : triggered) { - OperatorProxy op = get_operator_or_axiom(task_proxy, op_or_axiom_id); - if (operator_applicable(op, current_prop_layer)) { + for (int op_or_axiom_id : triggered_ops) { + const OperatorProxy &op = + get_operator_or_axiom(task_proxy, op_or_axiom_id); + if (operator_is_applicable(op, current_layer)) { LandmarkSet changed = apply_operator_and_propagate_labels( - op, current_prop_layer, next_prop_layer); + op, current_layer, next_layer); if (!changed.empty()) { changes = true; for (const FactPair &landmark : changed) - next_triggered.insert( + next_triggers.insert( triggers[landmark.var][landmark.value].begin(), triggers[landmark.var][landmark.value].end()); } } } - current_prop_layer = next_prop_layer; - triggered = next_triggered; + swap(current_layer, next_layer); + swap(triggered_ops, next_triggers); } +} + + +LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::build_relaxed_plan_graph_with_labels( + const TaskProxy &task_proxy) const { + assert(!triggers.empty()); - return current_prop_layer; + unordered_set triggered_ops( + task_proxy.get_operators().size() + task_proxy.get_axioms().size()); + PropositionLayer current_layer = + initialize_relaxed_plan_graph(task_proxy, triggered_ops); + + /* + Operators without preconditions do not propagate labels. So if they have + no conditional effects, it is only necessary to apply them once. 
If they + have conditional effects, they will be triggered again at later stages. + */ + triggered_ops.insert(operators_without_preconditions.begin(), + operators_without_preconditions.end()); + propagate_labels_until_fixed_point_reached( + task_proxy, move(triggered_ops), current_layer); + return current_layer; } -bool LandmarkFactoryZhuGivan::operator_applicable(const OperatorProxy &op, - const PropositionLayer &state) const { - // test preconditions - for (FactProxy fact : op.get_preconditions()) - if (!state[fact.get_variable().get_id()][fact.get_value()].reached()) +bool LandmarkFactoryZhuGivan::operator_is_applicable( + const OperatorProxy &op, const PropositionLayer &state) { + for (FactProxy atom : op.get_preconditions()) { + auto [var, value] = atom.get_pair(); + if (!state[var][value].reached()) { return false; + } + } return true; } -bool LandmarkFactoryZhuGivan::operator_cond_effect_fires( - const EffectConditionsProxy &effect_conditions, const PropositionLayer &layer) const { - for (FactProxy effect_condition : effect_conditions) - if (!layer[effect_condition.get_variable().get_id()][effect_condition.get_value()].reached()) +bool LandmarkFactoryZhuGivan::conditional_effect_fires( + const EffectConditionsProxy &effect_conditions, + const PropositionLayer &layer) { + for (FactProxy effect_condition : effect_conditions) { + auto [var, value] = effect_condition.get_pair(); + if (!layer[var][value].reached()) { return false; + } + } return true; } -static LandmarkSet _union(const LandmarkSet &a, const LandmarkSet &b) { - if (a.size() < b.size()) - return _union(b, a); - - LandmarkSet result = a; - - for (LandmarkSet::const_iterator it = b.begin(); it != b.end(); ++it) - result.insert(*it); - return result; -} - -static LandmarkSet _intersection(const LandmarkSet &a, const LandmarkSet &b) { - if (a.size() > b.size()) - return _intersection(b, a); - - LandmarkSet result; - - for (LandmarkSet::const_iterator it = a.begin(); it != a.end(); ++it) - if 
(b.find(*it) != b.end()) - result.insert(*it); - return result; -} - -LandmarkSet LandmarkFactoryZhuGivan::union_of_precondition_labels( - const OperatorProxy &op, const PropositionLayer ¤t) const { - LandmarkSet result; - - // TODO This looks like an O(n^2) algorithm where O(n log n) would do, a - // bit like the Python string concatenation anti-pattern. - for (FactProxy precondition : op.get_preconditions()) - result = _union(result, - current[precondition.get_variable().get_id()][precondition.get_value()].labels); - - return result; -} - LandmarkSet LandmarkFactoryZhuGivan::union_of_condition_labels( - const EffectConditionsProxy &effect_conditions, const PropositionLayer ¤t) const { + const ConditionsProxy &conditions, const PropositionLayer ¤t) { + /* TODO This looks like an O(n^2) algorithm where O(n log n) would + do, a bit like the Python string concatenation anti-pattern. */ LandmarkSet result; - for (FactProxy effect_condition : effect_conditions) - result = _union(result, current[effect_condition.get_variable().get_id()][effect_condition.get_value()].labels); - + for (FactProxy precondition : conditions) { + auto [var, value] = precondition.get_pair(); + union_inplace(result, current[var][value].labels); + } return result; } -static bool _propagate_labels(LandmarkSet &labels, const LandmarkSet &new_labels, - const FactPair &prop) { - LandmarkSet old_labels = labels; +// Returns whether labels have changed or `atom` has just been reached. +static bool propagate_labels( + LandmarkSet &labels, const LandmarkSet &new_labels, const FactPair &atom) { + int old_labels_size = static_cast(labels.size()); - if (!labels.empty()) { - labels = _intersection(labels, new_labels); - } else { + // If this is the first time `atom` is reached, it has an empty label set. 
+ if (labels.empty()) { labels = new_labels; + } else { + labels = get_intersection(labels, new_labels); } - labels.insert(prop); - - assert(old_labels.empty() || old_labels.size() >= labels.size()); - assert(!labels.empty()); - // test if labels have changed or proposition has just been reached: - // if it has just been reached: - // (old_labels.size() == 0) && (labels.size() >= 1) - // if old_labels.size() == labels.size(), then labels have not been refined - // by intersection. - return old_labels.size() != labels.size(); + // `atom` is a landmark for itself. + labels.insert(atom); + + /* + Updates should always reduce the label set (intersection), except in the + special case where `atom` was reached for the first time. + TODO: It would be more accurate to actually test the superset + relationship instead of just comparing set sizes. However, doing so + requires storing a copy of `labels` just to assert this. Also, it's + probably reasonable to trust the implementation of `get_intersection` + used above enough to not even assert this at all here. 
+ */ + int new_labels_size = static_cast(labels.size()); + assert(old_labels_size == 0 || old_labels_size >= new_labels_size); + return old_labels_size != new_labels_size; } LandmarkSet LandmarkFactoryZhuGivan::apply_operator_and_propagate_labels( const OperatorProxy &op, const PropositionLayer ¤t, PropositionLayer &next) const { - assert(operator_applicable(op, current)); + assert(operator_is_applicable(op, current)); + LandmarkSet precondition_labels = + union_of_condition_labels(op.get_preconditions(), current); LandmarkSet result; - LandmarkSet precond_label_union = union_of_precondition_labels(op, current); - - for (EffectProxy effect : op.get_effects()) { - FactPair effect_fact = effect.get_fact().get_pair(); - - if (next[effect_fact.var][effect_fact.value].labels.size() == 1) + for (const EffectProxy &effect : op.get_effects()) { + FactPair atom = effect.get_fact().get_pair(); + if (next[atom.var][atom.value].labels.size() == 1) { + // The only landmark for `atom` is `atom` itself. continue; - - if (operator_cond_effect_fires(effect.get_conditions(), current)) { - const LandmarkSet precond_label_union_with_condeff = _union( - precond_label_union, union_of_condition_labels( - // NOTE: this equals precond_label_union, if effects[i] is - // not a conditional effect. 
- effect.get_conditions(), current)); - - if (_propagate_labels(next[effect_fact.var][effect_fact.value].labels, - precond_label_union_with_condeff, effect_fact)) - result.insert(effect_fact); + } + if (conditional_effect_fires(effect.get_conditions(), current)) { + LandmarkSet condition_labels = + union_of_condition_labels(effect.get_conditions(), current); + union_inplace(condition_labels, precondition_labels); + bool labels_changed = propagate_labels( + next[atom.var][atom.value].labels, condition_labels, atom); + if (labels_changed) { + result.insert(atom); + } } } - return result; } void LandmarkFactoryZhuGivan::compute_triggers(const TaskProxy &task_proxy) { assert(triggers.empty()); - // initialize empty triggers - VariablesProxy variables = task_proxy.get_variables(); + // Initialize the data structure. + const VariablesProxy &variables = task_proxy.get_variables(); triggers.resize(variables.size()); - for (size_t i = 0; i < variables.size(); ++i) + for (int i = 0; i < static_cast(variables.size()); ++i) { triggers[i].resize(variables[i].get_domain_size()); + } - // compute triggers for (OperatorProxy op : task_proxy.get_operators()) { add_operator_to_triggers(op); } @@ -299,25 +288,23 @@ void LandmarkFactoryZhuGivan::compute_triggers(const TaskProxy &task_proxy) { } } -void LandmarkFactoryZhuGivan::add_operator_to_triggers(const OperatorProxy &op) { - // Collect possible triggers first. 
- LandmarkSet possible_triggers; - +void LandmarkFactoryZhuGivan::add_operator_to_triggers( + const OperatorProxy &op) { int op_or_axiom_id = get_operator_or_axiom_id(op); - PreconditionsProxy preconditions = op.get_preconditions(); - for (FactProxy precondition : preconditions) - possible_triggers.insert(precondition.get_pair()); - + const PreconditionsProxy &preconditions = op.get_preconditions(); + for (FactProxy precondition : preconditions) { + auto [var, value] = precondition.get_pair(); + triggers[var][value].push_back(op_or_axiom_id); + } for (EffectProxy effect : op.get_effects()) { - for (FactProxy effect_condition : effect.get_conditions()) - possible_triggers.insert(effect_condition.get_pair()); + for (FactProxy effect_condition : effect.get_conditions()) { + auto [var, value] = effect_condition.get_pair(); + triggers[var][value].push_back(op_or_axiom_id); + } } - if (preconditions.empty()) + if (preconditions.empty()) { operators_without_preconditions.push_back(op_or_axiom_id); - - // Add operator to triggers vector. - for (const FactPair &landmark : possible_triggers) - triggers[landmark.var][landmark.value].push_back(op_or_axiom_id); + } } bool LandmarkFactoryZhuGivan::supports_conditional_effects() const { diff --git a/src/search/landmarks/landmark_factory_zhu_givan.h b/src/search/landmarks/landmark_factory_zhu_givan.h index 4f83ccd4de..4f62b275d6 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.h +++ b/src/search/landmarks/landmark_factory_zhu_givan.h @@ -33,12 +33,12 @@ class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { // Note: must include operators that only have conditional effects. 
std::vector operators_without_preconditions; - bool operator_applicable( - const OperatorProxy &op, const PropositionLayer &state) const; + static bool operator_is_applicable( + const OperatorProxy &op, const PropositionLayer &state); - bool operator_cond_effect_fires( + static bool conditional_effect_fires( const EffectConditionsProxy &effect_conditions, - const PropositionLayer &layer) const; + const PropositionLayer &layer); /* Returns a set of propositions that: (a) have just been reached or (b) had their labels changed in next proposition layer. */ @@ -46,17 +46,16 @@ class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { const OperatorProxy &op, const PropositionLayer ¤t, PropositionLayer &next) const; - /* Calculate the union of precondition labels of `op`, using the - labels from the current layer. */ - LandmarkSet union_of_precondition_labels( - const OperatorProxy &op, const PropositionLayer ¤t) const; - - /* Calculate the union of precondition labels of a conditional effect - using the labels from the current layer. */ - LandmarkSet union_of_condition_labels( - const EffectConditionsProxy &effect_conditions, - const PropositionLayer ¤t) const; + // Calculate the union of condition labels from the current layer. + static LandmarkSet union_of_condition_labels( + const ConditionsProxy &conditions, const PropositionLayer ¤t); + PropositionLayer initialize_relaxed_plan_graph( + const TaskProxy &task_proxy, + std::unordered_set &triggered_ops) const; + void propagate_labels_until_fixed_point_reached( + const TaskProxy &task_proxy, std::unordered_set &&triggered_ops, + PropositionLayer ¤t_layer) const; /* Relaxed exploration, returns the last proposition layer (the fixpoint) with labels. 
*/ PropositionLayer build_relaxed_plan_graph_with_labels( diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 3dd24e8172..f7b3f77d29 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -50,6 +50,11 @@ utils::HashSet get_intersection( return intersection; } +void union_inplace(utils::HashSet &set1, + const utils::HashSet &set2) { + set1.insert(set2.begin(), set2.end()); +} + OperatorProxy get_operator_or_axiom(const TaskProxy &task_proxy, int op_or_axiom_id) { if (op_or_axiom_id < 0) { diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index 928976cd91..626884fae1 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -24,6 +24,8 @@ extern bool possibly_reaches_landmark( extern utils::HashSet get_intersection( const utils::HashSet &set1, const utils::HashSet &set2); +extern void union_inplace(utils::HashSet &set1, + const utils::HashSet &set2); extern OperatorProxy get_operator_or_axiom( const TaskProxy &task_proxy, int op_or_axiom_id); From c172a81ec6a4d4d0fbdd2c18563eac181580f90c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 26 Mar 2025 18:25:26 +0100 Subject: [PATCH 066/108] Fix DTG reasoning in RHW. 
--- .../landmarks/landmark_factory_rpg_sasp.cc | 45 ++++++++++--------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 27c6f33c78..fd9f6c849b 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -258,6 +258,7 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( } Landmark landmark({atom}, false, false); + cout << "adding lm" << landmark_graph->get_num_landmarks() << endl; LandmarkNode &simple_landmark_node = landmark_graph->add_landmark(move (landmark)); open_landmarks.push_back(&simple_landmark_node); @@ -320,7 +321,6 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( utils::HashSet LandmarkFactoryRpgSasp::compute_shared_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const vector> &reached) const { - // TODO: Could this be a set of FactPair instead? utils::HashSet shared_preconditions; bool init = true; for (const FactPair &atom : landmark.atoms) { @@ -519,6 +519,7 @@ void LandmarkFactoryRpgSasp::generate_shared_precondition_landmarks( /* All shared preconditions are landmarks, and greedy-necessary predecessors of `landmark`. 
*/ for (const FactPair &atom : shared_preconditions) { + cout << "try to add shared predecessor landmark" << endl; add_simple_landmark_and_ordering( atom, *node, OrderingType::GREEDY_NECESSARY); } @@ -598,7 +599,8 @@ static bool value_critical_to_reach_landmark( int init_value, int landmark_value, int excluded_value, const vector &reached, const vector> &successors) { assert(landmark_value != init_value); - assert(reached[landmark_value]); + assert(landmark_value != excluded_value); + assert(!reached[landmark_value]); if (excluded_value == init_value) { return true; } @@ -610,14 +612,14 @@ static bool value_critical_to_reach_landmark( int value = open.front(); open.pop_front(); for (int succ : successors[value]) { + if (succ == landmark_value) { + return false; + } if (!reached[succ]) { /* Values unreached in the delete relaxation cannot be landmarks for `landmark_value` even if they are reachable in the DTG. */ continue; } - if (succ == landmark_value) { - return false; - } if (!closed.contains(succ)) { open.push_back(succ); closed.insert(succ); @@ -630,14 +632,13 @@ static bool value_critical_to_reach_landmark( static vector get_critical_dtg_predecessors( int init_value, int landmark_value, const vector &reached, const vector> &successors) { - assert(reached[landmark_value]); + assert(!reached[landmark_value]); int domain_size = static_cast(reached.size()); vector critical; critical.reserve(domain_size); for (int value = 0; value < domain_size; ++value) { - if (value != landmark_value && reached[value] && - value_critical_to_reach_landmark(init_value, landmark_value, - value, reached, successors)) { + if (reached[value] && value_critical_to_reach_landmark( + init_value, landmark_value, value, reached, successors)) { critical.push_back(value); } } @@ -652,18 +653,20 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( const Landmark &landmark = node->get_landmark(); forward_orderings[node] = compute_atoms_unreachable_without_landmark( variables, 
landmark, reached); - if (!landmark.is_disjunctive && !landmark.is_conjunctive) { - assert(landmark.atoms.size() == 1); - const FactPair landmark_atom = landmark.atoms[0]; - const FactPair init_atom = - task_proxy.get_initial_state()[landmark_atom.var].get_pair(); - vector critical_predecessors = get_critical_dtg_predecessors( - landmark_atom.value, init_atom.value, - reached[landmark_atom.var], dtg_successors[landmark_atom.var]); - for (int value : critical_predecessors) { - add_simple_landmark_and_ordering(FactPair(landmark_atom.var, value), - *node, OrderingType::NATURAL); - } + if (landmark.is_disjunctive || landmark.is_conjunctive) { + return; + } + assert(landmark.atoms.size() == 1); + + const FactPair landmark_atom = landmark.atoms[0]; + const FactPair init_atom = + task_proxy.get_initial_state()[landmark_atom.var].get_pair(); + vector critical_predecessors = get_critical_dtg_predecessors( + init_atom.value, landmark_atom.value, + reached[landmark_atom.var], dtg_successors[landmark_atom.var]); + for (int value : critical_predecessors) { + add_simple_landmark_and_ordering(FactPair(landmark_atom.var, value), + *node, OrderingType::NATURAL); } } From 66b32d05fd4ba93cdd97729430ba301b832b9913 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Mar 2025 14:04:07 +0100 Subject: [PATCH 067/108] Really fix DTG reasoning. --- .../landmarks/landmark_factory_rpg_sasp.cc | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index fd9f6c849b..3edcc002de 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -135,6 +135,11 @@ static void add_binary_variable_conditions( } } +/* + TODO: This (accidentally) differs from the previous implementation which + imposed a stricter condition. 
Specifically, it also intersected over the + conditions of effects that have nothing to do with achieving the landmark. +*/ static void add_effect_conditions( const Landmark &landmark, const EffectsProxy &effects, utils::HashSet &result) { @@ -258,7 +263,6 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( } Landmark landmark({atom}, false, false); - cout << "adding lm" << landmark_graph->get_num_landmarks() << endl; LandmarkNode &simple_landmark_node = landmark_graph->add_landmark(move (landmark)); open_landmarks.push_back(&simple_landmark_node); @@ -278,8 +282,9 @@ bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( return landmark_graph->contains_simple_landmark(atom); })) { /* - Do not add landmark because the simple one is stronger. Do not add the - ordering to the simple landmark(s) as they are not guaranteed to hold. + Do not add the landmark because the simple one is stronger. Do not add + the ordering(s) to the corresponding simple landmark(s) as they are + not guaranteed to hold. */ return true; } @@ -519,7 +524,6 @@ void LandmarkFactoryRpgSasp::generate_shared_precondition_landmarks( /* All shared preconditions are landmarks, and greedy-necessary predecessors of `landmark`. */ for (const FactPair &atom : shared_preconditions) { - cout << "try to add shared predecessor landmark" << endl; add_simple_landmark_and_ordering( atom, *node, OrderingType::GREEDY_NECESSARY); } @@ -615,9 +619,13 @@ static bool value_critical_to_reach_landmark( if (succ == landmark_value) { return false; } - if (!reached[succ]) { - /* Values unreached in the delete relaxation cannot be landmarks - for `landmark_value` even if they are reachable in the DTG. */ + if (!reached[succ] || succ == excluded_value) { + /* + Values unreached in the delete relaxation cannot be landmarks + for `landmark_value` even if they are reachable in the DTG. + Also, we want to check whether it is possible to reach + `landmark_value` without going through `excluded_value`. 
+ */ continue; } if (!closed.contains(succ)) { From 26ede1d4a18ac942d3ef854562a787791d4c5f0a Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Mar 2025 21:41:34 +0100 Subject: [PATCH 068/108] Clarify comments in lm_hm. --- src/search/landmarks/landmark_factory_h_m.h | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 7a55c26d8a..d17e2e849f 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -29,6 +29,7 @@ struct PropositionSetComparer { /* Corresponds to an operator from the original problem, as well as a set of conditional effects that correspond to noops. */ struct PiMOperator { + // Preconditions and effects reference the proposition IDs. std::vector precondition; std::vector effect; /* In each of the inner vectors, the effect conditions are separated from @@ -37,7 +38,7 @@ struct PiMOperator { int id; }; -// represents a proposition in the P^m problem +// Represents a proposition in the P^m problem. struct HMEntry { // Propositions that belong to this set. const Propositions propositions; @@ -69,9 +70,6 @@ struct HMEntry { } }; -using PropositionSetToIntMap = - std::map; - class LandmarkFactoryHM : public LandmarkFactory { using TriggerSet = std::unordered_map>; @@ -83,8 +81,8 @@ class LandmarkFactoryHM : public LandmarkFactory { std::vector hm_table; std::vector pm_operators; - // Maps each set of < m propositions to an int. - PropositionSetToIntMap set_indices; + // Maps each set of < m propositions to an int representing its ID. + std::map set_indices; /* The number in the first position represents the amount of unsatisfied preconditions of the operator. 
The vector of numbers in the second From bf748fa2b393f8539f206c357ff740a83540843e Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Mar 2025 22:12:21 +0100 Subject: [PATCH 069/108] Get rid of list data type in lm_hm. --- src/search/landmarks/landmark_factory_h_m.cc | 160 ++++++------------- src/search/landmarks/landmark_factory_h_m.h | 31 ++-- 2 files changed, 66 insertions(+), 125 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f2de0e972f..f270e955a9 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -22,92 +22,29 @@ using namespace std; using utils::ExitCode; namespace landmarks { -// alist = alist \cup other -template -static void union_with(list &alist, const list &other) { - auto it1 = alist.begin(); - auto it2 = other.begin(); - - while (it1 != alist.end() && it2 != other.end()) { - if (*it1 < *it2) { - ++it1; - } else if (*it1 > *it2) { - alist.insert(it1, *it2); - ++it2; - } else { - ++it1; - ++it2; - } - } - alist.insert(it1, it2, other.end()); +static void union_inplace( + unordered_set &set1, const unordered_set &set2) { + set1.insert(set2.begin(), set2.end()); } -// alist = alist \cap other -template -static void intersect_with(list &alist, const list &other) { - auto it1 = alist.begin(); - auto it2 = other.begin(); - - while (it1 != alist.end() && it2 != other.end()) { - if (*it1 < *it2) { - auto tmp = it1; - ++tmp; - alist.erase(it1); - it1 = tmp; - } else if (*it1 > *it2) { - ++it2; - } else { - ++it1; - ++it2; +static void intersect_inplace( + unordered_set &set1, const unordered_set &set2) { + unordered_set result; + for (int entry : set1) { + if (set2.contains(entry)) { + result.insert(entry); } } - alist.erase(it1, alist.end()); + swap(set1, result); } -// alist = alist \setminus other -template -static void set_minus(list &alist, const list &other) { - auto it1 = alist.begin(); - auto it2 = 
other.begin(); - - while (it1 != alist.end() && it2 != other.end()) { - if (*it1 < *it2) { - ++it1; - } else if (*it1 > *it2) { - ++it2; - } else { - auto tmp = it1; - ++tmp; - alist.erase(it1); - it1 = tmp; - ++it2; - } +static void set_minus( + unordered_set &set1, const unordered_set &set2) { + for (int entry : set2) { + set1.erase(entry); } } -// alist = alist \cup {val} -template -static void insert_into(list &alist, const T &val) { - auto it1 = alist.begin(); - - while (it1 != alist.end()) { - if (*it1 > val) { - alist.insert(it1, val); - return; - } else if (*it1 < val) { - ++it1; - } else { - return; - } - } - alist.insert(it1, val); -} - -template -static bool contains(const list &alist, const T &val) { - return find(alist.begin(), alist.end(), val) != alist.end(); -} - static bool are_mutex(const VariablesProxy &variables, const FactPair &atom1, const FactPair &atom2) { return variables[atom1.var].get_fact(atom1.value).is_mutex( @@ -848,37 +785,39 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached } void LandmarkFactoryHM::collect_condition_landmarks( - const vector &condition, list &landmarks, - list &necessary) const { + const vector &condition, unordered_set &landmarks, + unordered_set &necessary) const { /* For each proposition, the proposition itself is not stored even though it is a landmark for itself. 
*/ for (int proposition : condition) { - union_with(landmarks, hm_table[proposition].landmarks); - insert_into(landmarks, proposition); - + union_inplace(landmarks, hm_table[proposition].landmarks); + landmarks.insert(proposition); if (use_orders) { - insert_into(necessary, proposition); + necessary.insert(proposition); } } } void LandmarkFactoryHM::update_effect_landmarks( - int op_id, const vector &effect, int level, const list &landmarks, - const list &necessary, TriggerSet &triggers) { + int op_id, const vector &effect, int level, + const unordered_set &landmarks, const unordered_set &necessary, + TriggerSet &triggers) { for (int proposition : effect) { if (hm_table[proposition].level != -1) { size_t prev_size = hm_table[proposition].landmarks.size(); - intersect_with(hm_table[proposition].landmarks, landmarks); + intersect_inplace(hm_table[proposition].landmarks, landmarks); /* If the effect appears in `landmarks`, the proposition is not achieved for the first time. No need to intersect for greedy-necessary orderings or add `op` to the first achievers. 
*/ - if (!contains(landmarks, proposition)) { - insert_into(hm_table[proposition].first_achievers, op_id); + if (!landmarks.contains(proposition)) { + hm_table[proposition].first_achievers.insert(op_id); if (use_orders) { - intersect_with(hm_table[proposition].prerequisite_landmark, necessary); + intersect_inplace( + hm_table[proposition].prerequisite_landmark, + necessary); } } @@ -891,7 +830,7 @@ void LandmarkFactoryHM::update_effect_landmarks( if (use_orders) { hm_table[proposition].prerequisite_landmark = necessary; } - insert_into(hm_table[proposition].first_achievers, op_id); + hm_table[proposition].first_achievers.insert(op_id); propagate_pm_propositions(proposition, true, triggers); } } @@ -899,7 +838,8 @@ void LandmarkFactoryHM::update_effect_landmarks( void LandmarkFactoryHM::update_noop_landmarks( const unordered_set ¤t_triggers, const PiMOperator &op, - int level, const list &landmarks, const list &necessary, + int level, const unordered_set &landmarks, + const unordered_set &necessary, TriggerSet &next_triggers) { if (current_triggers.empty()) { /* @@ -930,7 +870,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet next_trigger; for (int level = 1; !current_trigger.empty(); ++level) { for (auto &[op_id, triggers] : current_trigger) { - list local_landmarks, local_necessary; + unordered_set local_landmarks, local_necessary; PiMOperator &op = pm_operators[op_id]; collect_condition_landmarks( op.precondition, local_landmarks, local_necessary); @@ -952,22 +892,23 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::compute_noop_landmarks( - int op_id, int noop_index, const list &landmarks, - const list &necessary, int level, TriggerSet &next_trigger) { + int op_id, int noop_index, const unordered_set &landmarks, + const unordered_set &necessary, int level, TriggerSet &next_trigger) { const vector &conditional_noop = pm_operators[op_id].conditional_noops[noop_index]; 
const auto &[effect_condition, effect] = split_conditional_noop (conditional_noop); - list cn_landmarks = landmarks; - list cn_necessary; + unordered_set conditional_noop_landmarks = landmarks; + unordered_set conditional_noop_necessary; if (use_orders) { - cn_necessary = necessary; + conditional_noop_necessary = necessary; } - collect_condition_landmarks(effect_condition, cn_landmarks, cn_necessary); - update_effect_landmarks( - op_id, effect, level, cn_landmarks, cn_necessary, next_trigger); + collect_condition_landmarks(effect_condition, conditional_noop_landmarks, + conditional_noop_necessary); + update_effect_landmarks(op_id, effect, level, conditional_noop_landmarks, + conditional_noop_necessary, next_trigger); } void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { @@ -985,9 +926,9 @@ void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { } } -list LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_graph( +unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_graph( const VariablesProxy &variables, const Propositions &goals) { - list landmarks; + unordered_set landmarks; for (const Propositions &goal_subset : get_m_sets(variables, goals)) { assert(set_indices.contains(goal_subset)); int set_index = set_indices[goal_subset]; @@ -1002,9 +943,9 @@ list LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_graph( } } - union_with(landmarks, hm_table[set_index].landmarks); + union_inplace(landmarks, hm_table[set_index].landmarks); // The goal itself is also a landmark. 
- insert_into(landmarks, set_index); + landmarks.insert(set_index); add_landmark_node(set_index, true); } for (int landmark : landmarks) { @@ -1013,19 +954,20 @@ list LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_graph( return landmarks; } -void LandmarkFactoryHM::reduce_landmarks(const list &landmarks) { +void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { assert(use_orders); for (int landmark1 : landmarks) { - list extended_prerequisites = + unordered_set extended_prerequisites = hm_table[landmark1].prerequisite_landmark; for (int landmark2 : hm_table[landmark1].landmarks) { - union_with(extended_prerequisites, hm_table[landmark2].landmarks); + union_inplace(extended_prerequisites, hm_table[landmark2].landmarks); } set_minus(hm_table[landmark1].landmarks, extended_prerequisites); } } -void LandmarkFactoryHM::add_landmark_orderings(const list &landmarks) { +void LandmarkFactoryHM::add_landmark_orderings( + const unordered_set &landmarks) { for (int to : landmarks) { assert(landmark_nodes.contains(to)); for (int from : hm_table[to].prerequisite_landmark) { @@ -1048,7 +990,7 @@ void LandmarkFactoryHM::construct_landmark_graph( Propositions goals = task_properties::get_fact_pairs(task_proxy.get_goals()); VariablesProxy variables = task_proxy.get_variables(); - list landmarks = + unordered_set landmarks = collect_and_add_landmarks_to_landmark_graph(variables, goals); if (use_orders) { reduce_landmarks(landmarks); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index d17e2e849f..1a63a8999b 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -3,7 +3,6 @@ #include "landmark_factory.h" -#include #include #include @@ -46,16 +45,15 @@ struct HMEntry { // Level 0: present in initial state int level; - // TODO: Can we replace the `list` data type with `set` or even `vector`? 
- std::list landmarks; + std::unordered_set landmarks; /* Landmarks that are "preconditions" to achieve this `HMEntry`. This set is disjoint from `landmarks` above and used to derive greedy-necessary orderings. */ - std::list prerequisite_landmark; + std::unordered_set prerequisite_landmark; - std::list first_achievers; + std::unordered_set first_achievers; /* The first int represents an operator ID. If the second int is -1 it means @@ -91,10 +89,10 @@ class LandmarkFactoryHM : public LandmarkFactory { */ std::vector>> num_unsatisfied_preconditions; - std::list collect_and_add_landmarks_to_landmark_graph( + std::unordered_set collect_and_add_landmarks_to_landmark_graph( const VariablesProxy &variables, const Propositions &propositions); - void reduce_landmarks(const std::list &landmarks); - void add_landmark_orderings(const std::list &landmarks); + void reduce_landmarks(const std::unordered_set &landmarks); + void add_landmark_orderings(const std::unordered_set &landmarks); void construct_landmark_graph(const TaskProxy &task_proxy); virtual void generate_landmarks( const std::shared_ptr &task) override; @@ -102,20 +100,21 @@ class LandmarkFactoryHM : public LandmarkFactory { TriggerSet mark_state_propositions_reached( const State &state, const VariablesProxy &variables); void collect_condition_landmarks( - const std::vector &condition, std::list &landmarks, - std::list &necessary) + const std::vector &condition, std::unordered_set &landmarks, + std::unordered_set &necessary) const; void update_effect_landmarks( int op_id, const std::vector &effect, int level, - const std::list &landmarks, const std::list &necessary, - TriggerSet &triggers); + const std::unordered_set &landmarks, + const std::unordered_set &necessary, TriggerSet &triggers); void update_noop_landmarks( const std::unordered_set ¤t_triggers, const PiMOperator &op, - int level, const std::list &landmarks, - const std::list &necessary, TriggerSet &next_triggers); + int level, const std::unordered_set 
&landmarks, + const std::unordered_set &necessary, TriggerSet &next_triggers); void compute_noop_landmarks( - int op_id, int noop_index, const std::list &local_landmarks, - const std::list &local_necessary, int level, + int op_id, int noop_index, + const std::unordered_set &local_landmarks, + const std::unordered_set &local_necessary, int level, TriggerSet &next_trigger); void compute_hm_landmarks(const TaskProxy &task_proxy); From e4cef82ab848e3b8c971cd3ea61bb9ac5c8fae5f Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Mar 2025 22:29:34 +0100 Subject: [PATCH 070/108] Break overlength lines. --- src/search/landmarks/landmark_factory_h_m.cc | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f270e955a9..7958d01580 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -63,7 +63,8 @@ void LandmarkFactoryHM::get_m_sets_including_current_var( }); if (use_var) { current.push_back(atom); - get_m_sets(variables, num_included + 1, current_var + 1, current, subsets); + get_m_sets(variables, num_included + 1, current_var + 1, + current, subsets); current.pop_back(); } } @@ -109,7 +110,8 @@ void LandmarkFactoryHM::get_m_sets_of_set_including_current_proposition( // Find all subsets of `superset` with size m or less. void LandmarkFactoryHM::get_m_sets_of_set( const VariablesProxy &variables, int num_included, int current_index, - Propositions ¤t, vector &subsets, const Propositions &superset) { + Propositions ¤t, vector &subsets, + const Propositions &superset) { if (num_included == m) { subsets.push_back(current); return; @@ -123,7 +125,8 @@ void LandmarkFactoryHM::get_m_sets_of_set( get_m_sets_of_set_including_current_proposition( variables, num_included, current_index, current, subsets, superset); // Do not include proposition at `current_index` in set. 
- get_m_sets_of_set(variables, num_included, current_index + 1, current, subsets, superset); + get_m_sets_of_set(variables, num_included, current_index + 1, + current, subsets, superset); } void LandmarkFactoryHM::get_split_m_sets_including_current_proposition_from_first( @@ -211,8 +214,8 @@ vector LandmarkFactoryHM::get_m_sets( } #ifndef NDEBUG -static bool proposition_variables_disjoint(const Propositions &set1, - const Propositions &set2) { +static bool proposition_variables_disjoint( + const Propositions &set1, const Propositions &set2) { for (auto [var1, val1] : set1) { for (auto [var2, val2] : set2) { if (var1 == var2) { @@ -241,7 +244,8 @@ vector LandmarkFactoryHM::get_split_m_sets( } else if (superset2.empty()) { get_m_sets_of_set(variables, 0, 0, c, subsets, superset1); } else { - get_split_m_sets(variables, 0, 0, 0, 0, c, subsets, superset1, superset2); + get_split_m_sets( + variables, 0, 0, 0, 0, c, subsets, superset1, superset2); } return subsets; } @@ -960,7 +964,8 @@ void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { unordered_set extended_prerequisites = hm_table[landmark1].prerequisite_landmark; for (int landmark2 : hm_table[landmark1].landmarks) { - union_inplace(extended_prerequisites, hm_table[landmark2].landmarks); + union_inplace(extended_prerequisites, + hm_table[landmark2].landmarks); } set_minus(hm_table[landmark1].landmarks, extended_prerequisites); } From eb0663c4c309b1d8e31966940e5473c41472bbdc Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Fri, 28 Mar 2025 22:39:13 +0100 Subject: [PATCH 071/108] Fix style. 
--- src/search/landmarks/landmark_factory_h_m.cc | 22 +++++++++---------- .../landmarks/landmark_factory_rpg_sasp.cc | 22 +++++++++---------- .../landmarks/landmark_factory_zhu_givan.cc | 2 +- src/search/landmarks/util.cc | 2 +- src/search/landmarks/util.h | 2 +- src/search/utils/component_errors.h | 2 +- 6 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 7958d01580..d8389e6bb2 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -162,7 +162,7 @@ void LandmarkFactoryHM::get_split_m_sets( if (num_included1 + num_included2 == m || (current_index1 == superset1_size && - current_index2 == superset2_size)) { + current_index2 == superset2_size)) { if (num_included1 > 0 && num_included2 > 0) { subsets.push_back(current); } @@ -497,9 +497,9 @@ Propositions LandmarkFactoryHM::initialize_postconditions( } void LandmarkFactoryHM::add_conditional_noop( - PiMOperator &pm_op, int op_id, - const VariablesProxy &variables, const Propositions &propositions, - const Propositions &preconditions, const Propositions &postconditions) { + PiMOperator &pm_op, int op_id, + const VariablesProxy &variables, const Propositions &propositions, + const Propositions &preconditions, const Propositions &postconditions) { int noop_index = static_cast(pm_op.conditional_noops.size()); /* @@ -519,7 +519,7 @@ void LandmarkFactoryHM::add_conditional_noop( static_cast(noop_precondition_subsets.size())); // Add the conditional noop preconditions. - for (const auto & subset : noop_precondition_subsets) { + for (const auto &subset : noop_precondition_subsets) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; @@ -532,7 +532,7 @@ void LandmarkFactoryHM::add_conditional_noop( conditional_noop.push_back(-1); // Add the conditional noop effects. 
- for (const auto & subset : noop_postconditions_subsets) { + for (const auto &subset : noop_postconditions_subsets) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; @@ -561,7 +561,7 @@ void LandmarkFactoryHM::initialize_noops( propositions)) { // For each such set, add a "conditional effect" to the operator. add_conditional_noop(pm_op, op_id, variables, - propositions, preconditions, postconditions); + propositions, preconditions, postconditions); } } } @@ -661,8 +661,8 @@ static bool operator_can_achieve_landmark( continue; } auto mutex = [&](const FactPair &other) { - return are_mutex(variables, atom, other); - }; + return are_mutex(variables, atom, other); + }; if (any_of(postcondition.begin(), postcondition.end(), mutex)) { return false; } @@ -681,7 +681,7 @@ void LandmarkFactoryHM::approximate_possible_achievers( for (int op_id : candidates) { if (operator_can_achieve_landmark( - operators[op_id], landmark, variables)) { + operators[op_id], landmark, variables)) { landmark.possible_achievers.insert(op_id); } } @@ -901,7 +901,7 @@ void LandmarkFactoryHM::compute_noop_landmarks( const vector &conditional_noop = pm_operators[op_id].conditional_noops[noop_index]; const auto &[effect_condition, effect] = - split_conditional_noop (conditional_noop); + split_conditional_noop(conditional_noop); unordered_set conditional_noop_landmarks = landmarks; unordered_set conditional_noop_necessary; diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 3edcc002de..344a8e6adc 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -124,9 +124,9 @@ static void add_binary_variable_conditions( if (atom.var == var_id && initial_state[var_id].get_value() != atom.value) { assert(ranges::none_of(result.begin(), result.end(), - [&](const FactPair &result_atom) { - return result_atom.var 
== var_id; - })); + [&](const FactPair &result_atom) { + return result_atom.var == var_id; + })); result.insert(initial_state[var_id].get_pair()); break; } @@ -264,7 +264,7 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( Landmark landmark({atom}, false, false); LandmarkNode &simple_landmark_node = - landmark_graph->add_landmark(move (landmark)); + landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_landmark_node); if (use_orders) { add_or_replace_ordering_if_stronger(simple_landmark_node, node, type); @@ -278,9 +278,10 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( // Returns true if an overlapping landmark exists already. bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( const set &atoms, LandmarkNode &node, OrderingType type) const { - if (ranges::any_of(atoms.begin(), atoms.end(), [&](const FactPair &atom) { - return landmark_graph->contains_simple_landmark(atom); - })) { + if (ranges::any_of( + atoms.begin(), atoms.end(), [&](const FactPair &atom) { + return landmark_graph->contains_simple_landmark(atom); + })) { /* Do not add the landmark because the simple one is stronger. Do not add the ordering(s) to the corresponding simple landmark(s) as they are @@ -543,8 +544,7 @@ void LandmarkFactoryRpgSasp::generate_disjunctive_precondition_landmarks( [&](const FactPair &atom) { /* TODO: Is there a good reason why not? We allow simple landmarks to hold in the initial state. 
*/ - return initial_state[atom.var].get_value() == - atom.value; + return initial_state[atom.var].get_value() == atom.value; })) { add_disjunctive_landmark_and_ordering( preconditions, *node, OrderingType::GREEDY_NECESSARY); @@ -670,8 +670,8 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( const FactPair init_atom = task_proxy.get_initial_state()[landmark_atom.var].get_pair(); vector critical_predecessors = get_critical_dtg_predecessors( - init_atom.value, landmark_atom.value, - reached[landmark_atom.var], dtg_successors[landmark_atom.var]); + init_atom.value, landmark_atom.value, + reached[landmark_atom.var], dtg_successors[landmark_atom.var]); for (int value : critical_predecessors) { add_simple_landmark_and_ordering(FactPair(landmark_atom.var, value), *node, OrderingType::NATURAL); diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 1b774b50e9..4d4e5ec3e7 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -219,7 +219,7 @@ static bool propagate_labels( LandmarkSet &labels, const LandmarkSet &new_labels, const FactPair &atom) { int old_labels_size = static_cast(labels.size()); - // If this is the first time `atom` is reached, it has an empty label set. + // If this is the first time `atom` is reached, it has an empty label set. 
if (labels.empty()) { labels = new_labels; } else { diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index f7b3f77d29..62245b60df 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -51,7 +51,7 @@ utils::HashSet get_intersection( } void union_inplace(utils::HashSet &set1, - const utils::HashSet &set2) { + const utils::HashSet &set2) { set1.insert(set2.begin(), set2.end()); } diff --git a/src/search/landmarks/util.h b/src/search/landmarks/util.h index 626884fae1..1b91e130b7 100644 --- a/src/search/landmarks/util.h +++ b/src/search/landmarks/util.h @@ -25,7 +25,7 @@ extern bool possibly_reaches_landmark( extern utils::HashSet get_intersection( const utils::HashSet &set1, const utils::HashSet &set2); extern void union_inplace(utils::HashSet &set1, - const utils::HashSet &set2); + const utils::HashSet &set2); extern OperatorProxy get_operator_or_axiom( const TaskProxy &task_proxy, int op_or_axiom_id); diff --git a/src/search/utils/component_errors.h b/src/search/utils/component_errors.h index cdc0e8c3d2..058364013d 100644 --- a/src/search/utils/component_errors.h +++ b/src/search/utils/component_errors.h @@ -19,7 +19,7 @@ void verify_list_not_empty( const std::vector &list, const std::string &name) { if (list.empty()) { throw ComponentArgumentError( - "List argument '" + name + "' has to be non-empty."); + "List argument '" + name + "' has to be non-empty."); } } } From 96df731ed3d783ba3dcb1be9cdbb81cef9d52324 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 1 Apr 2025 10:00:09 +0200 Subject: [PATCH 072/108] Fix style. 
--- src/search/landmarks/landmark_cost_partitioning_algorithms.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index c3842586a9..b41ac2fa15 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -95,7 +95,7 @@ vector UniformCostPartitioningAlgorithm::second_pass( if (covered_by_action_landmark) { for (int op_id : achievers) { assert(utils::in_bounds( - op_id, landmarks_achieved_by_operator)); + op_id, landmarks_achieved_by_operator)); --landmarks_achieved_by_operator[op_id]; } } else { From 0fc6e08c4c703c771775ab5f15cac42fd5f1a17a Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 1 Apr 2025 15:02:36 +0200 Subject: [PATCH 073/108] Turn identical check into superset check to get same code as before refactoring. --- src/search/landmarks/landmark_factory_merged.cc | 2 +- src/search/landmarks/landmark_factory_rpg_sasp.cc | 10 ++++------ src/search/landmarks/landmark_graph.cc | 4 ++-- src/search/landmarks/landmark_graph.h | 2 +- 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 58dc70571b..039ea465db 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -26,7 +26,7 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { if (landmark.is_disjunctive) { set atoms(landmark.atoms.begin(), landmark.atoms.end()); - if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { + if (landmark_graph->contains_superset_disjunctive_landmark(atoms)) { return &landmark_graph->get_disjunctive_landmark_node( landmark.atoms[0]); } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc 
b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 344a8e6adc..ac654c78ff 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -290,13 +290,11 @@ bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( return true; } if (landmark_graph->contains_overlapping_disjunctive_landmark(atoms)) { - if (landmark_graph->contains_identical_disjunctive_landmark(atoms)) { - LandmarkNode &new_landmark_node = + if (use_orders && + landmark_graph->contains_superset_disjunctive_landmark(atoms)) { + LandmarkNode &other = landmark_graph->get_disjunctive_landmark_node(*atoms.begin()); - if (use_orders) { - add_or_replace_ordering_if_stronger( - new_landmark_node, node, type); - } + add_or_replace_ordering_if_stronger(other, node, type); } return true; } diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index b04bcd7f62..cac57aecc7 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -59,7 +59,7 @@ bool LandmarkGraph::contains_overlapping_disjunctive_landmark( }); } -bool LandmarkGraph::contains_identical_disjunctive_landmark( +bool LandmarkGraph::contains_superset_disjunctive_landmark( const set &atoms) const { assert(!atoms.empty()); const LandmarkNode *node = nullptr; @@ -75,7 +75,7 @@ bool LandmarkGraph::contains_identical_disjunctive_landmark( } } assert(node); - return atoms.size() == node->get_landmark().atoms.size(); + return true; } bool LandmarkGraph::contains_landmark(const FactPair &atom) const { diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index 72ac95ce5f..d1d0c19877 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -140,7 +140,7 @@ class LandmarkGraph { const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. 
*/ - bool contains_identical_disjunctive_landmark( + bool contains_superset_disjunctive_landmark( const std::set &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by From 21e86ca2f7165ec4de188c363c774daaa9223a71 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 1 Apr 2025 15:04:47 +0200 Subject: [PATCH 074/108] Use hash sets for soon-to-be-landmarks. --- src/search/landmarks/landmark_factory_merged.cc | 3 ++- .../landmarks/landmark_factory_rpg_sasp.cc | 17 +++++++++-------- .../landmarks/landmark_factory_rpg_sasp.h | 8 ++++---- src/search/landmarks/landmark_graph.cc | 4 ++-- src/search/landmarks/landmark_graph.h | 4 ++-- 5 files changed, 19 insertions(+), 17 deletions(-) diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 039ea465db..2e2d8fbeaa 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -25,7 +25,8 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { if (landmark.is_disjunctive) { - set atoms(landmark.atoms.begin(), landmark.atoms.end()); + utils::HashSet atoms( + landmark.atoms.begin(), landmark.atoms.end()); if (landmark_graph->contains_superset_disjunctive_landmark(atoms)) { return &landmark_graph->get_disjunctive_landmark_node( landmark.atoms[0]); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index ac654c78ff..6089cb8f20 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -277,7 +277,8 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( // Returns true if an overlapping landmark exists already. 
bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( - const set &atoms, LandmarkNode &node, OrderingType type) const { + const utils::HashSet &atoms, LandmarkNode &node, + OrderingType type) const { if (ranges::any_of( atoms.begin(), atoms.end(), [&](const FactPair &atom) { return landmark_graph->contains_simple_landmark(atom); @@ -302,7 +303,8 @@ bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( } void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( - const set &atoms, LandmarkNode &node, OrderingType type) { + const utils::HashSet &atoms, LandmarkNode &node, + OrderingType type) { assert(atoms.size() > 1); bool overlaps = deal_with_overlapping_landmarks(atoms, node, type); @@ -453,17 +455,16 @@ void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( } } -static vector> get_disjunctive_preconditions( +static vector> get_disjunctive_preconditions( const unordered_map> &preconditions_by_disjunction_class, const unordered_map> &used_operators_by_disjunction_class, int num_ops) { - vector> disjunctive_preconditions; + vector> disjunctive_preconditions; for (const auto &[disjunction_class, atoms] : preconditions_by_disjunction_class) { int used_operators = static_cast( used_operators_by_disjunction_class.at(disjunction_class).size()); if (used_operators == num_ops) { - set preconditions; - preconditions.insert(atoms.begin(), atoms.end()); + utils::HashSet preconditions(atoms.begin(), atoms.end()); if (preconditions.size() > 1) { disjunctive_preconditions.push_back(preconditions); } // Otherwise this landmark is not actually a disjunctive landmark. @@ -479,7 +480,7 @@ static vector> get_disjunctive_preconditions( atom from each of the operators, which we additionally restrict so that each atom in the set stems from the same disjunction class. 
*/ -vector> LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( +vector> LandmarkFactoryRpgSasp::compute_disjunctive_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const vector> &reached) const { vector op_or_axiom_ids = @@ -532,7 +533,7 @@ void LandmarkFactoryRpgSasp::generate_disjunctive_precondition_landmarks( const TaskProxy &task_proxy, const State &initial_state, const Landmark &landmark, LandmarkNode *node, const vector> &reached) { - vector> disjunctive_preconditions = + vector> disjunctive_preconditions = compute_disjunctive_preconditions(task_proxy, landmark, reached); for (const auto &preconditions : disjunctive_preconditions) { /* We don't want disjunctive landmarks to get too big. Also, diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 534f9178dd..44d979aabd 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -48,7 +48,7 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { const utils::HashSet &landmark_preconditions, int op_id, std::unordered_map> &preconditions, std::unordered_map> &used_operators) const; - std::vector> compute_disjunctive_preconditions( + std::vector> compute_disjunctive_preconditions( const TaskProxy &task_proxy, const Landmark &landmark, const std::vector> &reached) const; @@ -70,12 +70,12 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { LandmarkNode &simple_landmark_node); void add_simple_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type); - // TODO: Can we use something different than set in the next two? 
bool deal_with_overlapping_landmarks( - const std::set &atoms, LandmarkNode &node, + const utils::HashSet &atoms, LandmarkNode &node, OrderingType type) const; void add_disjunctive_landmark_and_ordering( - const std::set &atoms, LandmarkNode &node, OrderingType type); + const utils::HashSet &atoms, LandmarkNode &node, + OrderingType type); void approximate_lookahead_orderings( const TaskProxy &task_proxy, const std::vector> &reached, LandmarkNode *node); diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index cac57aecc7..387f72dd37 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -53,14 +53,14 @@ bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &atom) const { } bool LandmarkGraph::contains_overlapping_disjunctive_landmark( - const set &atoms) const { + const utils::HashSet &atoms) const { return any_of(atoms.begin(), atoms.end(), [&](const FactPair &atom) { return contains_disjunctive_landmark(atom); }); } bool LandmarkGraph::contains_superset_disjunctive_landmark( - const set &atoms) const { + const utils::HashSet &atoms) const { assert(!atoms.empty()); const LandmarkNode *node = nullptr; for (const FactPair &atom : atoms) { diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index d1d0c19877..23c1e2b826 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -137,11 +137,11 @@ class LandmarkGraph { when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ bool contains_overlapping_disjunctive_landmark( - const std::set &atoms) const; + const utils::HashSet &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. 
*/ bool contains_superset_disjunctive_landmark( - const std::set &atoms) const; + const utils::HashSet &atoms) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ From 9bb0b271d329e48848bd32c8a541ff4dc591879c Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 1 Apr 2025 18:07:07 +0200 Subject: [PATCH 075/108] Clean up some TODO comments. --- src/search/landmarks/exploration.cc | 1 - src/search/landmarks/landmark_cost_partitioning_algorithms.h | 2 +- src/search/landmarks/landmark_factory_merged.cc | 2 +- .../landmarks/landmark_factory_reasonable_orders_hps.cc | 2 +- src/search/landmarks/landmark_factory_rpg_sasp.cc | 5 ----- src/search/landmarks/landmark_factory_zhu_givan.cc | 2 +- 6 files changed, 4 insertions(+), 10 deletions(-) diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 70c4206a1d..7c18549afa 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -118,7 +118,6 @@ void Exploration::build_unary_operators(const OperatorProxy &op) { vector preconditions; int op_or_axiom_id = get_operator_or_axiom_id(op); - // TODO: Maybe the problem is with the new sorting? for (FactProxy pre : op.get_preconditions()) { preconditions.push_back(pre.get_pair()); } diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.h b/src/search/landmarks/landmark_cost_partitioning_algorithms.h index 65d70dce7d..664c9873f1 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.h +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.h @@ -43,7 +43,7 @@ class UniformCostPartitioningAlgorithm : public CostPartitioningAlgorithm { breaking apart long functions) without changing its behavior. 
Since we would like to implement computing the cost partitioning differently, and because these functions do not have just one simple purpose, we did not - bother trying to find descriptive function names. + bother trying to find descriptive function names at this time. */ double first_pass( std::vector &landmarks_achieved_by_operator, diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 2e2d8fbeaa..d0b1ac506f 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -66,7 +66,7 @@ void LandmarkFactoryMerged::add_simple_landmarks( log << "Adding simple landmarks" << endl; } for (auto &landmark_graph : landmark_graphs) { - // TODO: loop over landmarks instead + // TODO: Loop over landmarks instead. for (const auto &node : *landmark_graph) { const Landmark &landmark = node->get_landmark(); if (landmark.is_conjunctive) { diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 6701600ea3..acf052efd2 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -343,7 +343,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( /* Experimentally commenting this out -- see issue202. TODO: This code became unreachable and no longer works after - all the refactorings we did recently. + all the refactorings we did recently. Maybe we should just remove it? // Case 3: There exists an atom X inconsistent with B such that X->_gn A. 
for (const auto &parent : node_a->parents) { const LandmarkNode &node = *parent.first; diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 6089cb8f20..9ff1485280 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -135,11 +135,6 @@ static void add_binary_variable_conditions( } } -/* - TODO: This (accidentally) differs from the previous implementation which - imposed a stricter condition. Specifically, it also intersected over the - conditions of effects that have nothing to do with achieving the landmark. -*/ static void add_effect_conditions( const Landmark &landmark, const EffectsProxy &effects, utils::HashSet &result) { diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 4d4e5ec3e7..77e09ada33 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -204,7 +204,7 @@ bool LandmarkFactoryZhuGivan::conditional_effect_fires( LandmarkSet LandmarkFactoryZhuGivan::union_of_condition_labels( const ConditionsProxy &conditions, const PropositionLayer ¤t) { - /* TODO This looks like an O(n^2) algorithm where O(n log n) would + /* TODO: This looks like an O(n^2) algorithm where O(n log n) would do, a bit like the Python string concatenation anti-pattern. */ LandmarkSet result; for (FactProxy precondition : conditions) { From 674f17d617fc31ce8bdda6bf0bc6ffb1def5dfe9 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 10:51:01 +0200 Subject: [PATCH 076/108] Fix lm_merge. 
--- src/search/landmarks/landmark_factory_merged.cc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index d0b1ac506f..6749da9dbf 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -8,6 +8,8 @@ #include +#include "util.h" + using namespace std; using utils::ExitCode; @@ -65,9 +67,9 @@ void LandmarkFactoryMerged::add_simple_landmarks( if (log.is_at_least_normal()) { log << "Adding simple landmarks" << endl; } - for (auto &landmark_graph : landmark_graphs) { + for (const auto &graph_to_merge : landmark_graphs) { // TODO: Loop over landmarks instead. - for (const auto &node : *landmark_graph) { + for (const auto &node : *graph_to_merge) { const Landmark &landmark = node->get_landmark(); if (landmark.is_conjunctive) { cerr << "Don't know how to handle conjunctive landmarks yet" @@ -77,6 +79,7 @@ void LandmarkFactoryMerged::add_simple_landmarks( if (landmark.is_disjunctive) { continue; } + assert(landmark.atoms.size() == 1); if (!landmark_graph->contains_landmark(landmark.atoms[0])) { Landmark copy(landmark); landmark_graph->add_landmark(move(copy)); From 314e18e37b4278d5c9d47d65101acc15d4f158a7 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 11:36:25 +0200 Subject: [PATCH 077/108] Improve set intersection in lm_hm. 
--- src/search/landmarks/landmark_factory_h_m.cc | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index d8389e6bb2..ac6bad6ad5 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -27,15 +27,11 @@ static void union_inplace( set1.insert(set2.begin(), set2.end()); } -static void intersect_inplace( +static void intersection_inplace( unordered_set &set1, const unordered_set &set2) { - unordered_set result; - for (int entry : set1) { - if (set2.contains(entry)) { - result.insert(entry); - } - } - swap(set1, result); + erase_if(set1, [&set2](int element) { + return !set2.contains(element); + }); } static void set_minus( @@ -809,7 +805,7 @@ void LandmarkFactoryHM::update_effect_landmarks( for (int proposition : effect) { if (hm_table[proposition].level != -1) { size_t prev_size = hm_table[proposition].landmarks.size(); - intersect_inplace(hm_table[proposition].landmarks, landmarks); + intersection_inplace(hm_table[proposition].landmarks, landmarks); /* If the effect appears in `landmarks`, the proposition is not @@ -819,7 +815,7 @@ void LandmarkFactoryHM::update_effect_landmarks( if (!landmarks.contains(proposition)) { hm_table[proposition].first_achievers.insert(op_id); if (use_orders) { - intersect_inplace( + intersection_inplace( hm_table[proposition].prerequisite_landmark, necessary); } @@ -874,8 +870,9 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet next_trigger; for (int level = 1; !current_trigger.empty(); ++level) { for (auto &[op_id, triggers] : current_trigger) { - unordered_set local_landmarks, local_necessary; PiMOperator &op = pm_operators[op_id]; + unordered_set local_landmarks(op.precondition.size()), + local_necessary(op.precondition.size()); collect_condition_landmarks( op.precondition, local_landmarks, local_necessary); 
update_effect_landmarks(op_id, op.effect, level, local_landmarks, From a14cc00b7629221cbd7aa83ba7fce2dfeb21fc38 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 12:13:35 +0200 Subject: [PATCH 078/108] Clarify data structure. --- src/search/landmarks/landmark_factory_h_m.cc | 57 ++++++-------------- src/search/landmarks/landmark_factory_h_m.h | 11 ++-- 2 files changed, 24 insertions(+), 44 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index ac6bad6ad5..09dba41387 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -321,32 +321,13 @@ void LandmarkFactoryHM::print_pm_operator( } } -static pair, vector> split_conditional_noop( - const vector &conditional_noop) { - vector effect_condition; - effect_condition.reserve(conditional_noop.size()); - size_t i; - for (i = 0; conditional_noop[i] != -1; ++i) { - effect_condition.push_back(conditional_noop[i]); - } - - ++i; // Skip delimiter -1. 
- - vector effect; - effect.reserve(conditional_noop.size()); - for (; i < conditional_noop.size(); ++i) { - effect.push_back(conditional_noop[i]); - } - return {effect_condition, effect}; -} - void LandmarkFactoryHM::print_conditional_noop( - const VariablesProxy &variables, const vector &conditional_noop, + const VariablesProxy &variables, const ConditionalNoop &conditional_noop, vector, set>> &conditions) const { - auto [effect_condition, effect] = split_conditional_noop(conditional_noop); set effect_condition_set = - print_effect_condition(variables, effect_condition); - set effect_set = print_conditional_effect(variables, effect); + print_effect_condition(variables, conditional_noop.effect_condition); + set effect_set = + print_conditional_effect(variables, conditional_noop.effect); conditions.emplace_back(effect_condition_set, effect_set); log << endl << endl << endl; } @@ -508,34 +489,33 @@ void LandmarkFactoryHM::add_conditional_noop( vector noop_postconditions_subsets = get_split_m_sets(variables, postconditions, propositions); - vector conditional_noop; - conditional_noop.reserve(noop_precondition_subsets.size() + - noop_postconditions_subsets.size() + 1); num_unsatisfied_preconditions[op_id].second.push_back( static_cast(noop_precondition_subsets.size())); - // Add the conditional noop preconditions. + // Compute the conditional noop preconditions. + vector noop_condition; + noop_condition.reserve(noop_precondition_subsets.size()); for (const auto &subset : noop_precondition_subsets) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; - conditional_noop.push_back(set_index); + noop_condition.push_back(set_index); // These propositions are "conditional preconditions" for this operator. hm_table[set_index].triggered_operators.emplace_back(op_id, noop_index); } - // Separate conditional preconditions from conditional effects by number -1. 
- conditional_noop.push_back(-1); - - // Add the conditional noop effects. + // Compute the conditional noop effects. + vector noop_effect; + noop_effect.reserve(noop_postconditions_subsets.size()); for (const auto &subset : noop_postconditions_subsets) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; - conditional_noop.push_back(set_index); + noop_effect.push_back(set_index); } - pm_op.conditional_noops.push_back(move(conditional_noop)); + pm_op.conditional_noops.emplace_back( + move(noop_condition), move(noop_effect)); } void LandmarkFactoryHM::initialize_noops( @@ -869,10 +849,9 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { task_proxy.get_initial_state(), task_proxy.get_variables()); TriggerSet next_trigger; for (int level = 1; !current_trigger.empty(); ++level) { - for (auto &[op_id, triggers] : current_trigger) { + for (const auto &[op_id, triggers] : current_trigger) { PiMOperator &op = pm_operators[op_id]; - unordered_set local_landmarks(op.precondition.size()), - local_necessary(op.precondition.size()); + unordered_set local_landmarks, local_necessary; collect_condition_landmarks( op.precondition, local_landmarks, local_necessary); update_effect_landmarks(op_id, op.effect, level, local_landmarks, @@ -895,10 +874,8 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { void LandmarkFactoryHM::compute_noop_landmarks( int op_id, int noop_index, const unordered_set &landmarks, const unordered_set &necessary, int level, TriggerSet &next_trigger) { - const vector &conditional_noop = - pm_operators[op_id].conditional_noops[noop_index]; const auto &[effect_condition, effect] = - split_conditional_noop(conditional_noop); + pm_operators[op_id].conditional_noops[noop_index]; unordered_set conditional_noop_landmarks = landmarks; unordered_set conditional_noop_necessary; diff --git a/src/search/landmarks/landmark_factory_h_m.h 
b/src/search/landmarks/landmark_factory_h_m.h index 1a63a8999b..ab91e8ac2f 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -25,15 +25,18 @@ struct PropositionSetComparer { } }; +struct ConditionalNoop { + std::vector effect_condition; + std::vector effect; +}; + /* Corresponds to an operator from the original problem, as well as a set of conditional effects that correspond to noops. */ struct PiMOperator { // Preconditions and effects reference the proposition IDs. std::vector precondition; std::vector effect; - /* In each of the inner vectors, the effect conditions are separated from - the effect values by an entry of the value -1. */ - std::vector> conditional_noops; + std::vector conditional_noops; int id; }; @@ -161,7 +164,7 @@ class LandmarkFactoryHM : public LandmarkFactory { const VariablesProxy &variables, const PiMOperator &op) const; void print_conditional_noop( const VariablesProxy &variables, - const std::vector &conditional_noop, + const ConditionalNoop &conditional_noop, std::vector, std::set>> &conditions) const; std::set print_effect_condition( const VariablesProxy &variables, From 190cb66ae1edfa18c4cb8ab0a8428108f05ae75d Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 16:47:18 +0200 Subject: [PATCH 079/108] Optimize hm landmark generation. 
--- src/search/landmarks/landmark_factory_h_m.cc | 184 ++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 34 ++-- 2 files changed, 120 insertions(+), 98 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 09dba41387..7ca019a064 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -22,11 +22,6 @@ using namespace std; using utils::ExitCode; namespace landmarks { -static void union_inplace( - unordered_set &set1, const unordered_set &set2) { - set1.insert(set2.begin(), set2.end()); -} - static void intersection_inplace( unordered_set &set1, const unordered_set &set2) { erase_if(set1, [&set2](int element) { @@ -36,9 +31,9 @@ static void intersection_inplace( static void set_minus( unordered_set &set1, const unordered_set &set2) { - for (int entry : set2) { - set1.erase(entry); - } + erase_if(set1, [&set2](int element) { + return set2.contains(element); + }); } static bool are_mutex(const VariablesProxy &variables, @@ -726,9 +721,9 @@ void LandmarkFactoryHM::trigger_conditional_noop( // Triggers which operators are reevaluated at the next level. void LandmarkFactoryHM::propagate_pm_propositions( - int proposition_id, bool newly_discovered, TriggerSet &trigger) { + HMEntry &hm_entry, bool newly_discovered, TriggerSet &trigger) { // For each operator/noop for which the proposition is a precondition. - for (auto [op_id, noop_id] : hm_table[proposition_id].triggered_operators) { + for (auto [op_id, noop_id] : hm_entry.triggered_operators) { if (noop_id == -1) { // The proposition is a precondition for the operator itself. 
trigger_operator(op_id, newly_discovered, trigger); @@ -744,9 +739,9 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached TriggerSet triggers; for (const auto &proposition : state_propositions) { - int index = set_indices[proposition]; - hm_table[index].level = 0; - propagate_pm_propositions(index, true, triggers); + HMEntry &hm_entry = hm_table[set_indices[proposition]]; + hm_entry.reached = true; + propagate_pm_propositions(hm_entry, true, triggers); } /* This is necessary to trigger operators without preconditions which are @@ -765,62 +760,75 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached } void LandmarkFactoryHM::collect_condition_landmarks( - const vector &condition, unordered_set &landmarks, - unordered_set &necessary) const { + const vector &condition, unordered_set &landmarks) const { + for (int proposition : condition) { + const unordered_set &proposition_landmarks = + hm_table[proposition].landmarks; + landmarks.insert( + proposition_landmarks.begin(), proposition_landmarks.end()); + } /* For each proposition, the proposition itself is not stored even though it is a landmark for itself. 
*/ - for (int proposition : condition) { - union_inplace(landmarks, hm_table[proposition].landmarks); - landmarks.insert(proposition); + landmarks.insert(condition.begin(), condition.end()); +} + +void LandmarkFactoryHM::initialize_proposition_landmark( + int op_id, HMEntry &hm_entry, const unordered_set &landmarks, + const unordered_set &precondition_landmarks, TriggerSet &triggers) { + hm_entry.reached = true; + hm_entry.landmarks = landmarks; + if (use_orders) { + hm_entry.precondition_landmarks = precondition_landmarks; + } + hm_entry.first_achievers.insert(op_id); + propagate_pm_propositions(hm_entry, true, triggers); +} + +void LandmarkFactoryHM::update_proposition_landmark( + int op_id, int proposition, const unordered_set &landmarks, + const unordered_set &precondition_landmarks, TriggerSet &triggers) { + HMEntry &hm_entry = hm_table[proposition]; + size_t prev_size = hm_entry.landmarks.size(); + intersection_inplace(hm_entry.landmarks, landmarks); + + /* + If the effect appears in `landmarks`, the proposition is not + achieved for the first time. No need to intersect for + greedy-necessary orderings or add `op` to the first achievers. 
+ */ + if (!landmarks.contains(proposition)) { + hm_entry.first_achievers.insert(op_id); if (use_orders) { - necessary.insert(proposition); + intersection_inplace( + hm_entry.precondition_landmarks, precondition_landmarks); } } + + if (hm_entry.landmarks.size() != prev_size) { + propagate_pm_propositions(hm_entry, false, triggers); + } } void LandmarkFactoryHM::update_effect_landmarks( - int op_id, const vector &effect, int level, - const unordered_set &landmarks, const unordered_set &necessary, - TriggerSet &triggers) { + int op_id, const vector &effect, const unordered_set &landmarks, + const unordered_set &precondition_landmarks, TriggerSet &triggers) { for (int proposition : effect) { - if (hm_table[proposition].level != -1) { - size_t prev_size = hm_table[proposition].landmarks.size(); - intersection_inplace(hm_table[proposition].landmarks, landmarks); - - /* - If the effect appears in `landmarks`, the proposition is not - achieved for the first time. No need to intersect for - greedy-necessary orderings or add `op` to the first achievers. 
- */ - if (!landmarks.contains(proposition)) { - hm_table[proposition].first_achievers.insert(op_id); - if (use_orders) { - intersection_inplace( - hm_table[proposition].prerequisite_landmark, - necessary); - } - } - - if (hm_table[proposition].landmarks.size() != prev_size) { - propagate_pm_propositions(proposition, false, triggers); - } + HMEntry &hm_entry = hm_table[proposition]; + if (hm_entry.reached) { + update_proposition_landmark(op_id, proposition, landmarks, + precondition_landmarks, triggers); } else { - hm_table[proposition].level = level; - hm_table[proposition].landmarks = landmarks; - if (use_orders) { - hm_table[proposition].prerequisite_landmark = necessary; - } - hm_table[proposition].first_achievers.insert(op_id); - propagate_pm_propositions(proposition, true, triggers); + initialize_proposition_landmark( + op_id, hm_entry, landmarks, precondition_landmarks, + triggers); } } } void LandmarkFactoryHM::update_noop_landmarks( const unordered_set ¤t_triggers, const PiMOperator &op, - int level, const unordered_set &landmarks, - const unordered_set &necessary, - TriggerSet &next_triggers) { + const unordered_set &landmarks, + const unordered_set &prerequisites, TriggerSet &next_triggers) { if (current_triggers.empty()) { /* The landmarks for the operator have changed, so we have to recompute @@ -831,7 +839,7 @@ void LandmarkFactoryHM::update_noop_landmarks( for (int i = 0; i < num_noops; ++i) { if (num_unsatisfied_preconditions[op.id].second[i] == 0) { compute_noop_landmarks( - op.id, i, landmarks, necessary, level, next_triggers); + op.id, i, landmarks, prerequisites, next_triggers); } } } else { @@ -839,7 +847,7 @@ void LandmarkFactoryHM::update_noop_landmarks( for (int noop_it : current_triggers) { assert(num_unsatisfied_preconditions[op.id].second[noop_it] == 0); compute_noop_landmarks( - op.id, noop_it, landmarks, necessary, level, next_triggers); + op.id, noop_it, landmarks, prerequisites, next_triggers); } } } @@ -847,20 +855,23 @@ void 
LandmarkFactoryHM::update_noop_landmarks( void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet current_trigger = mark_state_propositions_reached( task_proxy.get_initial_state(), task_proxy.get_variables()); - TriggerSet next_trigger; for (int level = 1; !current_trigger.empty(); ++level) { + TriggerSet next_trigger; for (const auto &[op_id, triggers] : current_trigger) { PiMOperator &op = pm_operators[op_id]; - unordered_set local_landmarks, local_necessary; - collect_condition_landmarks( - op.precondition, local_landmarks, local_necessary); - update_effect_landmarks(op_id, op.effect, level, local_landmarks, - local_necessary, next_trigger); - update_noop_landmarks(triggers, op, level, local_landmarks, - local_necessary, next_trigger); + unordered_set landmarks, precondition_landmarks; + const vector &precondition = op.precondition; + collect_condition_landmarks(precondition, landmarks); + if (use_orders) { + precondition_landmarks.insert( + precondition.begin(), precondition.end()); + } + update_effect_landmarks(op_id, op.effect, landmarks, + precondition_landmarks, next_trigger); + update_noop_landmarks( + triggers, op, landmarks, precondition_landmarks, next_trigger); } current_trigger.swap(next_trigger); - next_trigger.clear(); if (log.is_at_least_verbose()) { log << "Level " << level << " completed." 
<< endl; @@ -873,20 +884,22 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { void LandmarkFactoryHM::compute_noop_landmarks( int op_id, int noop_index, const unordered_set &landmarks, - const unordered_set &necessary, int level, TriggerSet &next_trigger) { + const unordered_set &necessary, TriggerSet &next_trigger) { const auto &[effect_condition, effect] = pm_operators[op_id].conditional_noops[noop_index]; - unordered_set conditional_noop_landmarks = landmarks; + unordered_set conditional_noop_landmarks(landmarks); + collect_condition_landmarks(effect_condition, conditional_noop_landmarks); unordered_set conditional_noop_necessary; if (use_orders) { conditional_noop_necessary = necessary; + conditional_noop_landmarks.insert( + effect_condition.begin(), effect_condition.end()); } - collect_condition_landmarks(effect_condition, conditional_noop_landmarks, - conditional_noop_necessary); - update_effect_landmarks(op_id, effect, level, conditional_noop_landmarks, - conditional_noop_necessary, next_trigger); + update_effect_landmarks( + op_id, effect, move(conditional_noop_landmarks), + move(conditional_noop_necessary), next_trigger); } void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { @@ -909,22 +922,25 @@ unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_grap unordered_set landmarks; for (const Propositions &goal_subset : get_m_sets(variables, goals)) { assert(set_indices.contains(goal_subset)); - int set_index = set_indices[goal_subset]; + int proposition_id = set_indices[goal_subset]; - if (hm_table[set_index].level == -1) { + if (!hm_table[proposition_id].reached) { if (log.is_at_least_verbose()) { log << "\n\nSubset of goal not reachable !!.\n\n\n"; log << "Subset is: "; print_proposition_set( - variables, hm_table[set_index].propositions); + variables, hm_table[proposition_id].propositions); log << endl; } } - union_inplace(landmarks, hm_table[set_index].landmarks); + const unordered_set 
&proposition_landmarks = + hm_table[proposition_id].landmarks; + landmarks.insert( + proposition_landmarks.begin(), proposition_landmarks.end()); // The goal itself is also a landmark. - landmarks.insert(set_index); - add_landmark_node(set_index, true); + landmarks.insert(proposition_id); + add_landmark_node(proposition_id, true); } for (int landmark : landmarks) { add_landmark_node(landmark, false); @@ -934,22 +950,22 @@ unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_grap void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { assert(use_orders); - for (int landmark1 : landmarks) { - unordered_set extended_prerequisites = - hm_table[landmark1].prerequisite_landmark; - for (int landmark2 : hm_table[landmark1].landmarks) { - union_inplace(extended_prerequisites, - hm_table[landmark2].landmarks); + for (int landmark : landmarks) { + HMEntry &hm_entry = hm_table[landmark]; + set_minus(hm_entry.landmarks, hm_entry.precondition_landmarks); + for (int predecessor_landmark : hm_entry.landmarks) { + set_minus(hm_entry.landmarks, + hm_table[predecessor_landmark].landmarks); } - set_minus(hm_table[landmark1].landmarks, extended_prerequisites); } } void LandmarkFactoryHM::add_landmark_orderings( const unordered_set &landmarks) { + assert(use_orders); for (int to : landmarks) { assert(landmark_nodes.contains(to)); - for (int from : hm_table[to].prerequisite_landmark) { + for (int from : hm_table[to].precondition_landmarks) { assert(landmark_nodes.contains(from)); add_or_replace_ordering_if_stronger( *landmark_nodes[from], *landmark_nodes[to], diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index ab91e8ac2f..69a26d6d0c 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -44,9 +44,7 @@ struct PiMOperator { struct HMEntry { // Propositions that belong to this set. 
const Propositions propositions; - // Level -1: current cost infinite - // Level 0: present in initial state - int level; + bool reached; std::unordered_set landmarks; /* @@ -54,7 +52,7 @@ struct HMEntry { set is disjoint from `landmarks` above and used to derive greedy-necessary orderings. */ - std::unordered_set prerequisite_landmark; + std::unordered_set precondition_landmarks; std::unordered_set first_achievers; @@ -67,7 +65,7 @@ struct HMEntry { std::vector> triggered_operators; explicit HMEntry(Propositions &&propositions) - : propositions(move(propositions)), level(-1) { + : propositions(move(propositions)), reached(false) { } }; @@ -103,21 +101,29 @@ class LandmarkFactoryHM : public LandmarkFactory { TriggerSet mark_state_propositions_reached( const State &state, const VariablesProxy &variables); void collect_condition_landmarks( - const std::vector &condition, std::unordered_set &landmarks, - std::unordered_set &necessary) - const; + const std::vector &condition, + std::unordered_set &landmarks) const; + void initialize_proposition_landmark( + int op_id, HMEntry &hm_entry, const std::unordered_set &landmarks, + const std::unordered_set &precondition_landmarks, + TriggerSet &triggers); + void update_proposition_landmark( + int op_id, int proposition, const std::unordered_set &landmarks, + const std::unordered_set &precondition_landmarks, + TriggerSet &triggers); void update_effect_landmarks( - int op_id, const std::vector &effect, int level, + int op_id, const std::vector &effect, const std::unordered_set &landmarks, - const std::unordered_set &necessary, TriggerSet &triggers); + const std::unordered_set &precondition_landmarks, + TriggerSet &triggers); void update_noop_landmarks( const std::unordered_set ¤t_triggers, const PiMOperator &op, - int level, const std::unordered_set &landmarks, - const std::unordered_set &necessary, TriggerSet &next_triggers); + const std::unordered_set &landmarks, + const std::unordered_set &prerequisites, TriggerSet 
&next_triggers); void compute_noop_landmarks( int op_id, int noop_index, const std::unordered_set &local_landmarks, - const std::unordered_set &local_necessary, int level, + const std::unordered_set &local_necessary, TriggerSet &next_trigger); void compute_hm_landmarks(const TaskProxy &task_proxy); @@ -126,7 +132,7 @@ class LandmarkFactoryHM : public LandmarkFactory { void trigger_conditional_noop( int op_id, int noop_id, bool newly_discovered, TriggerSet &trigger); void propagate_pm_propositions( - int proposition_id, bool newly_discovered, TriggerSet &trigger); + HMEntry &hm_entry, bool newly_discovered, TriggerSet &trigger); Propositions initialize_preconditions( const VariablesProxy &variables, const OperatorProxy &op, From 07d3cf33436838d2ccd2a941b4832ae19b84a0b3 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 18:37:46 +0200 Subject: [PATCH 080/108] Simplify comment and implementation. --- src/search/landmarks/landmark_factory_h_m.cc | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 7ca019a064..7763bb5683 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -762,13 +762,10 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached void LandmarkFactoryHM::collect_condition_landmarks( const vector &condition, unordered_set &landmarks) const { for (int proposition : condition) { - const unordered_set &proposition_landmarks = - hm_table[proposition].landmarks; - landmarks.insert( - proposition_landmarks.begin(), proposition_landmarks.end()); + const HMEntry &hm_entry = hm_table[proposition]; + landmarks.insert(hm_entry.landmarks.begin(), hm_entry.landmarks.end()); } - /* For each proposition, the proposition itself is not stored even though - it is a landmark for itself. 
*/ + // Each proposition is a landmark for itself but not stored for itself. landmarks.insert(condition.begin(), condition.end()); } From ed295cd3d04f42a212430282ac31eb33f1841f42 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 3 Apr 2025 08:48:35 +0200 Subject: [PATCH 081/108] Revert to lists instead of unordered_sets because it's faster. --- src/search/landmarks/landmark_factory_h_m.cc | 107 +++++++++++++------ src/search/landmarks/landmark_factory_h_m.h | 27 ++--- 2 files changed, 91 insertions(+), 43 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 7763bb5683..213573a7fd 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -22,18 +22,64 @@ using namespace std; using utils::ExitCode; namespace landmarks { +static void union_inplace(list &set1, const list &set2) { + auto it1 = set1.begin(); + auto it2 = set2.begin(); + + while (it1 != set1.end() && it2 != set2.end()) { + if (*it1 < *it2) { + ++it1; + } else if (*it1 > *it2) { + set1.insert(it1, *it2); + ++it2; + } else { + ++it1; + ++it2; + } + } + set1.insert(it1, it2, set2.end()); +} + static void intersection_inplace( - unordered_set &set1, const unordered_set &set2) { - erase_if(set1, [&set2](int element) { - return !set2.contains(element); - }); + list &set1, const list &set2) { + auto it1 = set1.begin(); + auto tmp = set1.begin(); + auto it2 = set2.begin(); + + while (it1 != set1.end() && it2 != set2.end()) { + if (*it1 < *it2) { + tmp = it1; + ++tmp; + set1.erase(it1); + it1 = tmp; + } else if (*it1 > *it2) { + ++it2; + } else { + ++it1; + ++it2; + } + } } static void set_minus( - unordered_set &set1, const unordered_set &set2) { - erase_if(set1, [&set2](int element) { - return set2.contains(element); - }); + list &set1, const list &set2) { + auto it1 = set1.begin(); + auto tmp = set1.begin(); + auto it2 = set2.begin(); + + while (it1 != set1.end() && it2 != 
set2.end()) { + if (*it1 < *it2) { + ++it1; + } else if (*it1 > *it2) { + ++it2; + } else { + tmp = it1; + ++tmp; + set1.erase(it1); + it1 = tmp; + ++it2; + } + } } static bool are_mutex(const VariablesProxy &variables, @@ -760,18 +806,18 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached } void LandmarkFactoryHM::collect_condition_landmarks( - const vector &condition, unordered_set &landmarks) const { + const vector &condition, list &landmarks) const { for (int proposition : condition) { const HMEntry &hm_entry = hm_table[proposition]; - landmarks.insert(hm_entry.landmarks.begin(), hm_entry.landmarks.end()); + union_inplace(landmarks, hm_entry.landmarks); } // Each proposition is a landmark for itself but not stored for itself. - landmarks.insert(condition.begin(), condition.end()); + union_inplace(landmarks, list(condition.begin(), condition.end()));; } void LandmarkFactoryHM::initialize_proposition_landmark( - int op_id, HMEntry &hm_entry, const unordered_set &landmarks, - const unordered_set &precondition_landmarks, TriggerSet &triggers) { + int op_id, HMEntry &hm_entry, const list &landmarks, + const list &precondition_landmarks, TriggerSet &triggers) { hm_entry.reached = true; hm_entry.landmarks = landmarks; if (use_orders) { @@ -782,8 +828,8 @@ void LandmarkFactoryHM::initialize_proposition_landmark( } void LandmarkFactoryHM::update_proposition_landmark( - int op_id, int proposition, const unordered_set &landmarks, - const unordered_set &precondition_landmarks, TriggerSet &triggers) { + int op_id, int proposition, const list &landmarks, + const list &precondition_landmarks, TriggerSet &triggers) { HMEntry &hm_entry = hm_table[proposition]; size_t prev_size = hm_entry.landmarks.size(); intersection_inplace(hm_entry.landmarks, landmarks); @@ -793,7 +839,7 @@ void LandmarkFactoryHM::update_proposition_landmark( achieved for the first time. No need to intersect for greedy-necessary orderings or add `op` to the first achievers. 
*/ - if (!landmarks.contains(proposition)) { + if (find(landmarks.begin(), landmarks.end(), proposition) == landmarks.end()) { hm_entry.first_achievers.insert(op_id); if (use_orders) { intersection_inplace( @@ -807,8 +853,8 @@ void LandmarkFactoryHM::update_proposition_landmark( } void LandmarkFactoryHM::update_effect_landmarks( - int op_id, const vector &effect, const unordered_set &landmarks, - const unordered_set &precondition_landmarks, TriggerSet &triggers) { + int op_id, const vector &effect, const list &landmarks, + const list &precondition_landmarks, TriggerSet &triggers) { for (int proposition : effect) { HMEntry &hm_entry = hm_table[proposition]; if (hm_entry.reached) { @@ -824,8 +870,8 @@ void LandmarkFactoryHM::update_effect_landmarks( void LandmarkFactoryHM::update_noop_landmarks( const unordered_set ¤t_triggers, const PiMOperator &op, - const unordered_set &landmarks, - const unordered_set &prerequisites, TriggerSet &next_triggers) { + const list &landmarks, + const list &prerequisites, TriggerSet &next_triggers) { if (current_triggers.empty()) { /* The landmarks for the operator have changed, so we have to recompute @@ -856,12 +902,13 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet next_trigger; for (const auto &[op_id, triggers] : current_trigger) { PiMOperator &op = pm_operators[op_id]; - unordered_set landmarks, precondition_landmarks; + list landmarks, precondition_landmarks; const vector &precondition = op.precondition; collect_condition_landmarks(precondition, landmarks); if (use_orders) { - precondition_landmarks.insert( - precondition.begin(), precondition.end()); + union_inplace( + precondition_landmarks, + list(precondition.begin(), precondition.end())); } update_effect_landmarks(op_id, op.effect, landmarks, precondition_landmarks, next_trigger); @@ -880,18 +927,18 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::compute_noop_landmarks( - int 
op_id, int noop_index, const unordered_set &landmarks, - const unordered_set &necessary, TriggerSet &next_trigger) { + int op_id, int noop_index, const list &landmarks, + const list &necessary, TriggerSet &next_trigger) { const auto &[effect_condition, effect] = pm_operators[op_id].conditional_noops[noop_index]; - unordered_set conditional_noop_landmarks(landmarks); + list conditional_noop_landmarks(landmarks); collect_condition_landmarks(effect_condition, conditional_noop_landmarks); - unordered_set conditional_noop_necessary; + list conditional_noop_necessary; if (use_orders) { conditional_noop_necessary = necessary; - conditional_noop_landmarks.insert( - effect_condition.begin(), effect_condition.end()); + union_inplace(conditional_noop_landmarks, + list(effect_condition.begin(), effect_condition.end())); } update_effect_landmarks( @@ -931,7 +978,7 @@ unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_grap } } - const unordered_set &proposition_landmarks = + const list &proposition_landmarks = hm_table[proposition_id].landmarks; landmarks.insert( proposition_landmarks.begin(), proposition_landmarks.end()); diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 69a26d6d0c..353eea5870 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -3,6 +3,7 @@ #include "landmark_factory.h" +#include #include #include @@ -46,13 +47,13 @@ struct HMEntry { const Propositions propositions; bool reached; - std::unordered_set landmarks; + std::list landmarks; /* Landmarks that are "preconditions" to achieve this `HMEntry`. This set is disjoint from `landmarks` above and used to derive greedy-necessary orderings. 
*/ - std::unordered_set precondition_landmarks; + std::list precondition_landmarks; std::unordered_set first_achievers; @@ -102,28 +103,28 @@ class LandmarkFactoryHM : public LandmarkFactory { const State &state, const VariablesProxy &variables); void collect_condition_landmarks( const std::vector &condition, - std::unordered_set &landmarks) const; + std::list &landmarks) const; void initialize_proposition_landmark( - int op_id, HMEntry &hm_entry, const std::unordered_set &landmarks, - const std::unordered_set &precondition_landmarks, + int op_id, HMEntry &hm_entry, const std::list &landmarks, + const std::list &precondition_landmarks, TriggerSet &triggers); void update_proposition_landmark( - int op_id, int proposition, const std::unordered_set &landmarks, - const std::unordered_set &precondition_landmarks, + int op_id, int proposition, const std::list &landmarks, + const std::list &precondition_landmarks, TriggerSet &triggers); void update_effect_landmarks( int op_id, const std::vector &effect, - const std::unordered_set &landmarks, - const std::unordered_set &precondition_landmarks, + const std::list &landmarks, + const std::list &precondition_landmarks, TriggerSet &triggers); void update_noop_landmarks( const std::unordered_set ¤t_triggers, const PiMOperator &op, - const std::unordered_set &landmarks, - const std::unordered_set &prerequisites, TriggerSet &next_triggers); + const std::list &landmarks, + const std::list &prerequisites, TriggerSet &next_triggers); void compute_noop_landmarks( int op_id, int noop_index, - const std::unordered_set &local_landmarks, - const std::unordered_set &local_necessary, + const std::list &local_landmarks, + const std::list &local_necessary, TriggerSet &next_trigger); void compute_hm_landmarks(const TaskProxy &task_proxy); From 9889962afb72b8cd46ace84dfbb0ed3af83766a3 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 3 Apr 2025 10:05:08 +0200 Subject: [PATCH 082/108] Use vectors instead of lists. 
--- src/search/landmarks/landmark_factory_h_m.cc | 140 +++++++------------ src/search/landmarks/landmark_factory_h_m.h | 28 ++-- 2 files changed, 68 insertions(+), 100 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 213573a7fd..6be10578c6 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -22,64 +22,22 @@ using namespace std; using utils::ExitCode; namespace landmarks { -static void union_inplace(list &set1, const list &set2) { - auto it1 = set1.begin(); - auto it2 = set2.begin(); - - while (it1 != set1.end() && it2 != set2.end()) { - if (*it1 < *it2) { - ++it1; - } else if (*it1 > *it2) { - set1.insert(it1, *it2); - ++it2; - } else { - ++it1; - ++it2; - } - } - set1.insert(it1, it2, set2.end()); +static void intersection_inplace(vector &set1, const vector &set2) { + assert(is_sorted(set1.begin(), set1.end())); + assert(is_sorted(set2.begin(), set2.end())); + vector result; + ranges::set_intersection(set1.begin(), set1.end(), set2.begin(), + set2.end(), back_inserter(result)); + swap(set1, result); } -static void intersection_inplace( - list &set1, const list &set2) { - auto it1 = set1.begin(); - auto tmp = set1.begin(); - auto it2 = set2.begin(); - - while (it1 != set1.end() && it2 != set2.end()) { - if (*it1 < *it2) { - tmp = it1; - ++tmp; - set1.erase(it1); - it1 = tmp; - } else if (*it1 > *it2) { - ++it2; - } else { - ++it1; - ++it2; - } - } -} - -static void set_minus( - list &set1, const list &set2) { - auto it1 = set1.begin(); - auto tmp = set1.begin(); - auto it2 = set2.begin(); - - while (it1 != set1.end() && it2 != set2.end()) { - if (*it1 < *it2) { - ++it1; - } else if (*it1 > *it2) { - ++it2; - } else { - tmp = it1; - ++tmp; - set1.erase(it1); - it1 = tmp; - ++it2; - } - } +static void set_minus(vector &set1, const vector &set2) { + assert(is_sorted(set1.begin(), set1.end())); + assert(is_sorted(set2.begin(), 
set2.end())); + vector result; + ranges::set_difference(set1.begin(), set1.end(), set2.begin(), + set2.end(), inserter(result, result.begin())); + swap(set1, result); } static bool are_mutex(const VariablesProxy &variables, @@ -712,7 +670,7 @@ void LandmarkFactoryHM::calc_achievers(const TaskProxy &task_proxy) { OperatorsProxy operators = task_proxy.get_operators(); VariablesProxy variables = task_proxy.get_variables(); - /* The `first_achievers` are already filled in by `compute_h_m_landmarks`, + /* The `first_achievers` are already filled in by `compute_hm_landmarks`, so here we only have to do `possible_achievers` */ for (const auto &node : *landmark_graph) { Landmark &landmark = node->get_landmark(); @@ -806,18 +764,20 @@ LandmarkFactoryHM::TriggerSet LandmarkFactoryHM::mark_state_propositions_reached } void LandmarkFactoryHM::collect_condition_landmarks( - const vector &condition, list &landmarks) const { + const vector &condition, vector &landmarks) const { for (int proposition : condition) { - const HMEntry &hm_entry = hm_table[proposition]; - union_inplace(landmarks, hm_entry.landmarks); + const vector &other_landmarks = hm_table[proposition].landmarks; + landmarks.insert(landmarks.end(), other_landmarks.begin(), + other_landmarks.end()); } // Each proposition is a landmark for itself but not stored for itself. 
- union_inplace(landmarks, list(condition.begin(), condition.end()));; + landmarks.insert(landmarks.end(), condition.begin(), condition.end()); + utils::sort_unique(landmarks); } void LandmarkFactoryHM::initialize_proposition_landmark( - int op_id, HMEntry &hm_entry, const list &landmarks, - const list &precondition_landmarks, TriggerSet &triggers) { + int op_id, HMEntry &hm_entry, const vector &landmarks, + const vector &precondition_landmarks, TriggerSet &triggers) { hm_entry.reached = true; hm_entry.landmarks = landmarks; if (use_orders) { @@ -828,8 +788,8 @@ void LandmarkFactoryHM::initialize_proposition_landmark( } void LandmarkFactoryHM::update_proposition_landmark( - int op_id, int proposition, const list &landmarks, - const list &precondition_landmarks, TriggerSet &triggers) { + int op_id, int proposition, const vector &landmarks, + const vector &precondition_landmarks, TriggerSet &triggers) { HMEntry &hm_entry = hm_table[proposition]; size_t prev_size = hm_entry.landmarks.size(); intersection_inplace(hm_entry.landmarks, landmarks); @@ -853,8 +813,8 @@ void LandmarkFactoryHM::update_proposition_landmark( } void LandmarkFactoryHM::update_effect_landmarks( - int op_id, const vector &effect, const list &landmarks, - const list &precondition_landmarks, TriggerSet &triggers) { + int op_id, const vector &effect, const vector &landmarks, + const vector &precondition_landmarks, TriggerSet &triggers) { for (int proposition : effect) { HMEntry &hm_entry = hm_table[proposition]; if (hm_entry.reached) { @@ -870,8 +830,8 @@ void LandmarkFactoryHM::update_effect_landmarks( void LandmarkFactoryHM::update_noop_landmarks( const unordered_set ¤t_triggers, const PiMOperator &op, - const list &landmarks, - const list &prerequisites, TriggerSet &next_triggers) { + const vector &landmarks, + const vector &prerequisites, TriggerSet &next_triggers) { if (current_triggers.empty()) { /* The landmarks for the operator have changed, so we have to recompute @@ -902,13 +862,14 @@ void 
LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { TriggerSet next_trigger; for (const auto &[op_id, triggers] : current_trigger) { PiMOperator &op = pm_operators[op_id]; - list landmarks, precondition_landmarks; + vector landmarks, precondition_landmarks; const vector &precondition = op.precondition; collect_condition_landmarks(precondition, landmarks); if (use_orders) { - union_inplace( - precondition_landmarks, - list(precondition.begin(), precondition.end())); + precondition_landmarks.insert( + precondition_landmarks.end(), precondition.begin(), + precondition.end()); + utils::sort_unique(precondition_landmarks); } update_effect_landmarks(op_id, op.effect, landmarks, precondition_landmarks, next_trigger); @@ -927,23 +888,25 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { } void LandmarkFactoryHM::compute_noop_landmarks( - int op_id, int noop_index, const list &landmarks, - const list &necessary, TriggerSet &next_trigger) { + int op_id, int noop_index, const vector &landmarks, + const vector &necessary, TriggerSet &next_trigger) { const auto &[effect_condition, effect] = pm_operators[op_id].conditional_noops[noop_index]; - list conditional_noop_landmarks(landmarks); + vector conditional_noop_landmarks(landmarks); collect_condition_landmarks(effect_condition, conditional_noop_landmarks); - list conditional_noop_necessary; + vector conditional_noop_necessary; if (use_orders) { conditional_noop_necessary = necessary; - union_inplace(conditional_noop_landmarks, - list(effect_condition.begin(), effect_condition.end())); + conditional_noop_necessary.insert( + conditional_noop_necessary.end(), effect_condition.begin(), + effect_condition.end()); + utils::sort_unique(conditional_noop_necessary); } update_effect_landmarks( - op_id, effect, move(conditional_noop_landmarks), - move(conditional_noop_necessary), next_trigger); + op_id, effect, conditional_noop_landmarks, + conditional_noop_necessary, next_trigger); } void 
LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { @@ -978,7 +941,7 @@ unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_grap } } - const list &proposition_landmarks = + const vector &proposition_landmarks = hm_table[proposition_id].landmarks; landmarks.insert( proposition_landmarks.begin(), proposition_landmarks.end()); @@ -996,11 +959,16 @@ void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { assert(use_orders); for (int landmark : landmarks) { HMEntry &hm_entry = hm_table[landmark]; - set_minus(hm_entry.landmarks, hm_entry.precondition_landmarks); - for (int predecessor_landmark : hm_entry.landmarks) { - set_minus(hm_entry.landmarks, - hm_table[predecessor_landmark].landmarks); + vector landmarks_to_remove(hm_entry.precondition_landmarks); + for (int predecessor : hm_entry.landmarks) { + const vector &predecessor_landmarks = + hm_table[predecessor].landmarks; + landmarks_to_remove.insert( + landmarks_to_remove.end(), predecessor_landmarks.begin(), + predecessor_landmarks.end()); } + utils::sort_unique(landmarks_to_remove); + set_minus(hm_entry.landmarks, landmarks_to_remove); } } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 353eea5870..8781eb2c18 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -3,9 +3,9 @@ #include "landmark_factory.h" -#include #include #include +#include namespace landmarks { using Propositions = std::vector; @@ -47,13 +47,13 @@ struct HMEntry { const Propositions propositions; bool reached; - std::list landmarks; + std::vector landmarks; /* Landmarks that are "preconditions" to achieve this `HMEntry`. This set is disjoint from `landmarks` above and used to derive greedy-necessary orderings. 
*/ - std::list precondition_landmarks; + std::vector precondition_landmarks; std::unordered_set first_achievers; @@ -103,28 +103,28 @@ class LandmarkFactoryHM : public LandmarkFactory { const State &state, const VariablesProxy &variables); void collect_condition_landmarks( const std::vector &condition, - std::list &landmarks) const; + std::vector &landmarks) const; void initialize_proposition_landmark( - int op_id, HMEntry &hm_entry, const std::list &landmarks, - const std::list &precondition_landmarks, + int op_id, HMEntry &hm_entry, const std::vector &landmarks, + const std::vector &precondition_landmarks, TriggerSet &triggers); void update_proposition_landmark( - int op_id, int proposition, const std::list &landmarks, - const std::list &precondition_landmarks, + int op_id, int proposition, const std::vector &landmarks, + const std::vector &precondition_landmarks, TriggerSet &triggers); void update_effect_landmarks( int op_id, const std::vector &effect, - const std::list &landmarks, - const std::list &precondition_landmarks, + const std::vector &landmarks, + const std::vector &precondition_landmarks, TriggerSet &triggers); void update_noop_landmarks( const std::unordered_set ¤t_triggers, const PiMOperator &op, - const std::list &landmarks, - const std::list &prerequisites, TriggerSet &next_triggers); + const std::vector &landmarks, + const std::vector &prerequisites, TriggerSet &next_triggers); void compute_noop_landmarks( int op_id, int noop_index, - const std::list &local_landmarks, - const std::list &local_necessary, + const std::vector &local_landmarks, + const std::vector &local_necessary, TriggerSet &next_trigger); void compute_hm_landmarks(const TaskProxy &task_proxy); From 2b5f3bec23e3c3cad6af644967772236678c0f51 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Thu, 3 Apr 2025 17:56:50 +0200 Subject: [PATCH 083/108] Fix memory profile. 
--- src/search/landmarks/landmark_factory_h_m.cc | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 6be10578c6..446f778281 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -22,7 +22,7 @@ using namespace std; using utils::ExitCode; namespace landmarks { -static void intersection_inplace(vector &set1, const vector &set2) { +static void set_intersection(vector &set1, const vector &set2) { assert(is_sorted(set1.begin(), set1.end())); assert(is_sorted(set2.begin(), set2.end())); vector result; @@ -31,7 +31,7 @@ static void intersection_inplace(vector &set1, const vector &set2) { swap(set1, result); } -static void set_minus(vector &set1, const vector &set2) { +static void set_difference(vector &set1, const vector &set2) { assert(is_sorted(set1.begin(), set1.end())); assert(is_sorted(set2.begin(), set2.end())); vector result; @@ -520,7 +520,6 @@ void LandmarkFactoryHM::add_conditional_noop( void LandmarkFactoryHM::initialize_noops( const VariablesProxy &variables, PiMOperator &pm_op, int op_id, const Propositions &preconditions, const Propositions &postconditions) { - pm_op.conditional_noops.reserve(set_indices.size()); /* For all subsets used in the problem with size *<* m, check whether they conflict with the postcondition of the operator. 
(No need to @@ -539,6 +538,7 @@ void LandmarkFactoryHM::initialize_noops( propositions, preconditions, postconditions); } } + pm_op.conditional_noops.shrink_to_fit(); } void LandmarkFactoryHM::build_pm_operators(const TaskProxy &task_proxy) { @@ -792,7 +792,7 @@ void LandmarkFactoryHM::update_proposition_landmark( const vector &precondition_landmarks, TriggerSet &triggers) { HMEntry &hm_entry = hm_table[proposition]; size_t prev_size = hm_entry.landmarks.size(); - intersection_inplace(hm_entry.landmarks, landmarks); + set_intersection(hm_entry.landmarks, landmarks); /* If the effect appears in `landmarks`, the proposition is not @@ -802,8 +802,8 @@ void LandmarkFactoryHM::update_proposition_landmark( if (find(landmarks.begin(), landmarks.end(), proposition) == landmarks.end()) { hm_entry.first_achievers.insert(op_id); if (use_orders) { - intersection_inplace( - hm_entry.precondition_landmarks, precondition_landmarks); + set_intersection(hm_entry.precondition_landmarks, + precondition_landmarks); } } @@ -864,6 +864,7 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { PiMOperator &op = pm_operators[op_id]; vector landmarks, precondition_landmarks; const vector &precondition = op.precondition; + assert(is_sorted(precondition.begin(), precondition.end())); collect_condition_landmarks(precondition, landmarks); if (use_orders) { precondition_landmarks.insert( @@ -959,6 +960,8 @@ void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { assert(use_orders); for (int landmark : landmarks) { HMEntry &hm_entry = hm_table[landmark]; + /* We cannot remove directly from `hm_entry.landmarks` because doing + so invalidates the loop variable. 
*/ vector landmarks_to_remove(hm_entry.precondition_landmarks); for (int predecessor : hm_entry.landmarks) { const vector &predecessor_landmarks = @@ -968,7 +971,7 @@ void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { predecessor_landmarks.end()); } utils::sort_unique(landmarks_to_remove); - set_minus(hm_entry.landmarks, landmarks_to_remove); + set_difference(hm_entry.landmarks, landmarks_to_remove); } } From e39594c21a8154b0d5cc749023d10500f706619f Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 8 Apr 2025 16:38:04 +0200 Subject: [PATCH 084/108] Fix accidental changes in hm-landmark graphs. This fixes two things: On the one hand, the number of orderings was different than in the base version, which lead to different heuristics. Additionally, the approximation of possible achievers was less strict than in the base version. Now it is even stricter than in the base version which might actually improve the heuristic. --- src/search/landmarks/landmark_factory.cc | 4 ++-- src/search/landmarks/landmark_factory_h_m.cc | 16 +++++++++++++--- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 55c7686568..5a24f73326 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -131,8 +131,8 @@ void LandmarkFactory::log_landmark_graph_info( << landmark_graph->get_num_disjunctive_landmarks() << " are disjunctive and " << landmark_graph->get_num_conjunctive_landmarks() - << " are conjunctive.\nThere are " - << landmark_graph->get_num_orderings() + << " are conjunctive." << endl; + log << "There are " << landmark_graph->get_num_orderings() << " landmark orderings." 
<< endl; } } diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 446f778281..20db432dad 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -629,10 +629,12 @@ static bool operator_can_achieve_landmark( Propositions postcondition = get_operator_postcondition(static_cast(variables.size()), op); + int matching_postconditions = 0; for (const FactPair &atom : landmark.atoms) { if (find(postcondition.begin(), postcondition.end(), atom) != postcondition.end()) { // This `atom` is a postcondition of `op`, move on to the next one. + ++matching_postconditions; continue; } auto mutex = [&](const FactPair &other) { @@ -642,7 +644,7 @@ static bool operator_can_achieve_landmark( return false; } } - return true; + return matching_postconditions == static_cast(landmark.atoms.size()); } void LandmarkFactoryHM::approximate_possible_achievers( @@ -864,7 +866,6 @@ void LandmarkFactoryHM::compute_hm_landmarks(const TaskProxy &task_proxy) { PiMOperator &op = pm_operators[op_id]; vector landmarks, precondition_landmarks; const vector &precondition = op.precondition; - assert(is_sorted(precondition.begin(), precondition.end())); collect_condition_landmarks(precondition, landmarks); if (use_orders) { precondition_landmarks.insert( @@ -958,7 +959,16 @@ unordered_set LandmarkFactoryHM::collect_and_add_landmarks_to_landmark_grap void LandmarkFactoryHM::reduce_landmarks(const unordered_set &landmarks) { assert(use_orders); - for (int landmark : landmarks) { + /* + TODO: This function depends on the order in which landmarks are processed. + I don't think there's a particular reason to sort the landmarks apart + from it was like this before the refactoring in issue992 and we wanted + the changes to induce no semantic changes. It's probably best to replace + this with a deterministic function that does not depend on the order. 
+ */ + vector sorted_landmarks(landmarks.begin(), landmarks.end()); + sort(sorted_landmarks.begin(), sorted_landmarks.end()); + for (int landmark : sorted_landmarks) { HMEntry &hm_entry = hm_table[landmark]; /* We cannot remove directly from `hm_entry.landmarks` because doing so invalidates the loop variable. */ From ed4ee1a03dd1a698a2c791192af84d06bd8bf6bc Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 9 Apr 2025 09:47:15 +0200 Subject: [PATCH 085/108] Fix computation of possible achievers. --- src/search/landmarks/landmark_factory_h_m.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 20db432dad..dc551f6051 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -629,12 +629,10 @@ static bool operator_can_achieve_landmark( Propositions postcondition = get_operator_postcondition(static_cast(variables.size()), op); - int matching_postconditions = 0; for (const FactPair &atom : landmark.atoms) { if (find(postcondition.begin(), postcondition.end(), atom) != postcondition.end()) { // This `atom` is a postcondition of `op`, move on to the next one. 
- ++matching_postconditions; continue; } auto mutex = [&](const FactPair &other) { @@ -643,8 +641,11 @@ static bool operator_can_achieve_landmark( if (any_of(postcondition.begin(), postcondition.end(), mutex)) { return false; } + if (any_of(precondition.begin(), precondition.end(), mutex)) { + return false; + } } - return matching_postconditions == static_cast(landmark.atoms.size()); + return true; } void LandmarkFactoryHM::approximate_possible_achievers( From 4597a3278073b659f786759c35e84af02553f5df Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Mon, 3 Mar 2025 18:29:51 +0100 Subject: [PATCH 086/108] use prototype implementation as a basis --- .../refinement_hierarchy.cc | 2 +- .../subtask_generators.cc | 2 +- src/search/heuristics/cea_heuristic.cc | 7 ++- src/search/heuristics/cg_cache.cc | 2 +- src/search/heuristics/cg_heuristic.cc | 6 +- src/search/landmarks/landmark.cc | 4 +- .../landmarks/landmark_factory_rpg_sasp.cc | 16 ++--- .../landmarks/landmark_factory_zhu_givan.cc | 2 +- src/search/merge_and_shrink/fts_factory.cc | 2 +- .../merge_and_shrink_representation.cc | 2 +- .../state_equation_constraints.cc | 2 +- src/search/pdbs/cegar.cc | 2 +- src/search/pruning/stubborn_sets.h | 2 +- .../pruning/stubborn_sets_atom_centric.cc | 6 +- src/search/pruning/stubborn_sets_ec.cc | 6 +- src/search/state_registry.cc | 2 +- src/search/task_proxy.h | 61 ++++++++++++++++--- 17 files changed, 86 insertions(+), 40 deletions(-) diff --git a/src/search/cartesian_abstractions/refinement_hierarchy.cc b/src/search/cartesian_abstractions/refinement_hierarchy.cc index 7431b118ef..704f54c48c 100644 --- a/src/search/cartesian_abstractions/refinement_hierarchy.cc +++ b/src/search/cartesian_abstractions/refinement_hierarchy.cc @@ -60,7 +60,7 @@ NodeID RefinementHierarchy::get_node_id(const State &state) const { NodeID id = 0; while (nodes[id].is_split()) { const Node &node = nodes[id]; - id = node.get_child(state[node.get_var()].get_value()); + id = 
node.get_child(state[node.get_var()]); } return id; } diff --git a/src/search/cartesian_abstractions/subtask_generators.cc b/src/search/cartesian_abstractions/subtask_generators.cc index 3d3b0adcb1..a861c7cdf2 100644 --- a/src/search/cartesian_abstractions/subtask_generators.cc +++ b/src/search/cartesian_abstractions/subtask_generators.cc @@ -53,7 +53,7 @@ static void remove_initial_state_facts( const TaskProxy &task_proxy, Facts &facts) { State initial_state = task_proxy.get_initial_state(); facts.erase(remove_if(facts.begin(), facts.end(), [&](FactPair fact) { - return initial_state[fact.var].get_value() == fact.value; + return initial_state[fact.var] == fact.value; }), facts.end()); } diff --git a/src/search/heuristics/cea_heuristic.cc b/src/search/heuristics/cea_heuristic.cc index 12aba20f85..e070afa7ef 100644 --- a/src/search/heuristics/cea_heuristic.cc +++ b/src/search/heuristics/cea_heuristic.cc @@ -228,7 +228,8 @@ void ContextEnhancedAdditiveHeuristic::set_up_local_problem( LocalProblemNode *start = &problem->nodes[start_value]; start->cost = 0; for (size_t i = 0; i < problem->context_variables->size(); ++i) - start->context[i] = state[(*problem->context_variables)[i]].get_value(); + // TODO issue997: is casting from int to a short here fine? 
+ start->context[i] = static_cast(state[(*problem->context_variables)[i]]); add_to_heap(start); } @@ -379,10 +380,10 @@ void ContextEnhancedAdditiveHeuristic::mark_helpful_transitions( int precond_value = assignment.value; int local_var = assignment.local_var; int precond_var_no = context_vars[local_var]; - if (state[precond_var_no].get_value() == precond_value) + if (state[precond_var_no] == precond_value) continue; LocalProblem *subproblem = get_local_problem( - precond_var_no, state[precond_var_no].get_value()); + precond_var_no, state[precond_var_no]); LocalProblemNode *subnode = &subproblem->nodes[precond_value]; mark_helpful_transitions(subproblem, subnode, state); } diff --git a/src/search/heuristics/cg_cache.cc b/src/search/heuristics/cg_cache.cc index 2479136d34..c431e51312 100644 --- a/src/search/heuristics/cg_cache.cc +++ b/src/search/heuristics/cg_cache.cc @@ -119,7 +119,7 @@ int CGCache::get_index(int var, const State &state, int index = from_val; int multiplier = task_proxy.get_variables()[var].get_domain_size(); for (int dep_var : depends_on[var]) { - index += state[dep_var].get_value() * multiplier; + index += state[dep_var] * multiplier; multiplier *= task_proxy.get_variables()[dep_var].get_domain_size(); } if (to_val > from_val) diff --git a/src/search/heuristics/cg_heuristic.cc b/src/search/heuristics/cg_heuristic.cc index 8ec18a3deb..d8047bb53d 100644 --- a/src/search/heuristics/cg_heuristic.cc +++ b/src/search/heuristics/cg_heuristic.cc @@ -58,7 +58,7 @@ int CGHeuristic::compute_heuristic(const State &ancestor_state) { for (FactProxy goal : task_proxy.get_goals()) { const VariableProxy var = goal.get_variable(); int var_no = var.get_id(); - int from = state[var_no].get_value(), to = goal.get_value(); + int from = state[var_no], to = goal.get_value(); DomainTransitionGraph *dtg = transition_graphs[var_no].get(); int cost_for_goal = get_transition_cost(state, dtg, from, to); if (cost_for_goal == numeric_limits::max()) { @@ -114,7 +114,7 @@ int 
CGHeuristic::get_transition_cost(const State &state, start->children_state.resize(dtg->local_to_global_child.size()); for (size_t i = 0; i < dtg->local_to_global_child.size(); ++i) { start->children_state[i] = - state[dtg->local_to_global_child[i]].get_value(); + state[dtg->local_to_global_child[i]]; } // Initialize Heap for Dijkstra's algorithm. @@ -226,7 +226,7 @@ int CGHeuristic::get_transition_cost(const State &state, void CGHeuristic::mark_helpful_transitions(const State &state, DomainTransitionGraph *dtg, int to) { int var_no = dtg->var; - int from = state[var_no].get_value(); + int from = state[var_no]; if (from == to) return; diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 4056858c9a..5c08d82600 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -6,7 +6,7 @@ namespace landmarks { bool Landmark::is_true_in_state(const State &state) const { if (disjunctive) { for (const FactPair &fact : facts) { - if (state[fact.var].get_value() == fact.value) { + if (state[fact.var] == fact.value) { return true; } } @@ -14,7 +14,7 @@ bool Landmark::is_true_in_state(const State &state) const { } else { // conjunctive or simple for (const FactPair &fact : facts) { - if (state[fact.var].get_value() != fact.value) { + if (state[fact.var] != fact.value) { return false; } } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 19b7005942..2c91161d95 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -96,8 +96,8 @@ void LandmarkFactoryRpgSasp::get_greedy_preconditions_for_lm( if (!has_precondition_on_var[var_id] && effect_fact.get_variable().get_domain_size() == 2) { for (const FactPair &lm_fact : landmark.facts) { if (lm_fact.var == var_id && - initial_state[var_id].get_value() != lm_fact.value) { - result.emplace(var_id, initial_state[var_id].get_value()); + 
initial_state[var_id] != lm_fact.value) { + result.emplace(var_id, initial_state[var_id]); break; } } @@ -206,7 +206,7 @@ void LandmarkFactoryRpgSasp::found_disj_lm_and_order( FactPair lm_prop = FactPair::no_fact; State initial_state = task_proxy.get_initial_state(); for (const FactPair &lm : a) { - if (initial_state[lm.var].get_value() == lm.value) { + if (initial_state[lm.var] == lm.value) { return; } if (lm_graph->contains_simple_landmark(lm)) { @@ -526,17 +526,17 @@ bool LandmarkFactoryRpgSasp::domain_connectivity(const State &initial_state, is crucial for achieving the landmark (i.e. is on every path to the LM). */ int var = landmark.var; - assert(landmark.value != initial_state[var].get_value()); // no initial state landmarks + assert(landmark.value != initial_state[var]); // no initial state landmarks // The value that we want to achieve must not be excluded: assert(exclude.find(landmark.value) == exclude.end()); // If the value in the initial state is excluded, we won't achieve our goal value: - if (exclude.find(initial_state[var].get_value()) != exclude.end()) + if (exclude.find(initial_state[var]) != exclude.end()) return false; list open; - unordered_set closed(initial_state[var].get_variable().get_domain_size()); + unordered_set closed(initial_state.get_task().get_variables()[var].get_domain_size()); closed = exclude; - open.push_back(initial_state[var].get_value()); - closed.insert(initial_state[var].get_value()); + open.push_back(initial_state[var]); + closed.insert(initial_state[var]); const vector> &successors = dtg_successors[var]; while (closed.find(landmark.value) == closed.end()) { if (open.empty()) // landmark not in closed and nothing more to insert diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index a8f92e93c4..8ccf44f862 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -112,7 +112,7 @@ 
LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::build_relaxed current_prop_layer[var_id].resize(var.get_domain_size()); // label nodes from initial state - int value = initial_state[var].get_value(); + int value = initial_state[var.get_id()]; current_prop_layer[var_id][value].labels.emplace(var_id, value); triggered.insert(triggers[var_id][value].begin(), triggers[var_id][value].end()); diff --git a/src/search/merge_and_shrink/fts_factory.cc b/src/search/merge_and_shrink/fts_factory.cc index 48604f1680..9528b1ed79 100644 --- a/src/search/merge_and_shrink/fts_factory.cc +++ b/src/search/merge_and_shrink/fts_factory.cc @@ -118,7 +118,7 @@ unique_ptr FTSFactory::create_labels() { void FTSFactory::build_state_data(VariableProxy var) { int var_id = var.get_id(); TransitionSystemData &ts_data = transition_system_data_by_var[var_id]; - ts_data.init_state = task_proxy.get_initial_state()[var_id].get_value(); + ts_data.init_state = task_proxy.get_initial_state()[var_id]; int range = task_proxy.get_variables()[var_id].get_domain_size(); ts_data.num_states = range; diff --git a/src/search/merge_and_shrink/merge_and_shrink_representation.cc b/src/search/merge_and_shrink/merge_and_shrink_representation.cc index 18731721ca..7fcd08657f 100644 --- a/src/search/merge_and_shrink/merge_and_shrink_representation.cc +++ b/src/search/merge_and_shrink/merge_and_shrink_representation.cc @@ -58,7 +58,7 @@ void MergeAndShrinkRepresentationLeaf::apply_abstraction_to_lookup_table( } int MergeAndShrinkRepresentationLeaf::get_value(const State &state) const { - int value = state[var_id].get_value(); + int value = state[var_id]; return lookup_table[value]; } diff --git a/src/search/operator_counting/state_equation_constraints.cc b/src/search/operator_counting/state_equation_constraints.cc index 877056d8b7..51c4531185 100644 --- a/src/search/operator_counting/state_equation_constraints.cc +++ b/src/search/operator_counting/state_equation_constraints.cc @@ -103,7 +103,7 @@ bool 
StateEquationConstraints::update_constraints(const State &state, double lower_bound = 0; /* If we consider the current value of var, there must be an additional consumer. */ - if (state[var].get_value() == value) { + if (state[var] == value) { --lower_bound; } /* If we consider the goal value of var, there must be an diff --git a/src/search/pdbs/cegar.cc b/src/search/pdbs/cegar.cc index 0c2c5e179c..911b6f3356 100644 --- a/src/search/pdbs/cegar.cc +++ b/src/search/pdbs/cegar.cc @@ -387,7 +387,7 @@ bool CEGAR::get_flaws_for_pattern( bool raise_goal_flaw = false; for (const FactPair &goal : goals) { int goal_var_id = goal.var; - if (final_state[goal_var_id].get_value() != goal.value && + if (final_state[goal_var_id] != goal.value && !blacklisted_variables.count(goal_var_id)) { flaws.emplace_back(collection_index, goal_var_id); raise_goal_flaw = true; diff --git a/src/search/pruning/stubborn_sets.h b/src/search/pruning/stubborn_sets.h index 402395ddec..43807046f0 100644 --- a/src/search/pruning/stubborn_sets.h +++ b/src/search/pruning/stubborn_sets.h @@ -62,7 +62,7 @@ class StubbornSets : public PruningMethod { inline FactPair find_unsatisfied_condition( const std::vector &conditions, const State &state) { for (const FactPair &condition : conditions) { - if (state[condition.var].get_value() != condition.value) + if (state[condition.var] != condition.value) return condition; } return FactPair::no_fact; diff --git a/src/search/pruning/stubborn_sets_atom_centric.cc b/src/search/pruning/stubborn_sets_atom_centric.cc index c3b6fc1027..4f78f757be 100644 --- a/src/search/pruning/stubborn_sets_atom_centric.cc +++ b/src/search/pruning/stubborn_sets_atom_centric.cc @@ -138,7 +138,7 @@ FactPair StubbornSetsAtomCentric::select_fact( choose it. Otherwise, choose the first unsatisfied fact. 
*/ for (const FactPair &condition : facts) { - if (state[condition.var].get_value() != condition.value) { + if (state[condition.var] != condition.value) { if (marked_producers[condition.var][condition.value]) { fact = condition; break; @@ -150,7 +150,7 @@ FactPair StubbornSetsAtomCentric::select_fact( } else if (atom_selection_strategy == AtomSelectionStrategy::STATIC_SMALL) { int min_count = numeric_limits::max(); for (const FactPair &condition : facts) { - if (state[condition.var].get_value() != condition.value) { + if (state[condition.var] != condition.value) { int count = achievers[condition.var][condition.value].size(); if (count < min_count) { fact = condition; @@ -161,7 +161,7 @@ FactPair StubbornSetsAtomCentric::select_fact( } else if (atom_selection_strategy == AtomSelectionStrategy::DYNAMIC_SMALL) { int min_count = numeric_limits::max(); for (const FactPair &condition : facts) { - if (state[condition.var].get_value() != condition.value) { + if (state[condition.var] != condition.value) { const vector &ops = achievers[condition.var][condition.value]; int count = count_if( ops.begin(), ops.end(), [&](int op) {return !stubborn[op];}); diff --git a/src/search/pruning/stubborn_sets_ec.cc b/src/search/pruning/stubborn_sets_ec.cc index d6b2972fde..bc6f0a953b 100644 --- a/src/search/pruning/stubborn_sets_ec.cc +++ b/src/search/pruning/stubborn_sets_ec.cc @@ -18,7 +18,7 @@ static inline bool is_v_applicable(int var, const State &state, vector> &preconditions) { int precondition_on_var = preconditions[op_no][var]; - return precondition_on_var == -1 || precondition_on_var == state[var].get_value(); + return precondition_on_var == -1 || precondition_on_var == state[var]; } static vector build_dtgs(TaskProxy task_proxy) { @@ -164,7 +164,7 @@ void StubbornSetsEC::compute_active_operators(const State &state) { for (const FactPair &precondition : sorted_op_preconditions[op_no]) { int var_id = precondition.var; - int current_value = state[var_id].get_value(); + int 
current_value = state[var_id]; const vector &reachable_values = reachability_map[var_id][current_value]; if (!reachable_values[precondition.value]) { @@ -258,7 +258,7 @@ void StubbornSetsEC::get_disabled_vars( void StubbornSetsEC::apply_s5(int op_no, const State &state) { // Find a violated state variable and check if stubborn contains a writer for this variable. for (const FactPair &pre : sorted_op_preconditions[op_no]) { - if (state[pre.var].get_value() != pre.value && written_vars[pre.var]) { + if (state[pre.var] != pre.value && written_vars[pre.var]) { if (!nes_computed[pre.var][pre.value]) { add_nes_for_fact(pre, state); } diff --git a/src/search/state_registry.cc b/src/search/state_registry.cc index a494c05023..47a07b6374 100644 --- a/src/search/state_registry.cc +++ b/src/search/state_registry.cc @@ -56,7 +56,7 @@ const State &StateRegistry::get_initial_state() { State initial_state = task_proxy.get_initial_state(); for (size_t i = 0; i < initial_state.size(); ++i) { - state_packer.set(buffer.get(), i, initial_state[i].get_value()); + state_packer.set(buffer.get(), i, initial_state[i]); } state_data_pool.push_back(buffer.get()); StateID id = insert_id_or_pop_state(); diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 4a07f81518..75b0827e02 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -95,7 +95,8 @@ using PackedStateBin = int_packer::IntPacker::Bin; /* Basic iterator support for proxy collections. */ -template +template::value>::type* = nullptr> class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. */ @@ -582,8 +583,6 @@ class State { const int_packer::IntPacker *state_packer; int num_variables; public: - using ItemType = FactProxy; - // Construct a registered state with only packed data. 
State(const AbstractTask &task, const StateRegistry ®istry, StateID id, const PackedStateBin *buffer); @@ -601,7 +600,7 @@ class State { void unpack() const; std::size_t size() const; - FactProxy operator[](std::size_t var_id) const; + int operator[](std::size_t var_id) const; FactProxy operator[](VariableProxy var) const; TaskProxy get_task() const; @@ -801,19 +800,19 @@ inline std::size_t State::size() const { return num_variables; } -inline FactProxy State::operator[](std::size_t var_id) const { +inline int State::operator[](std::size_t var_id) const { assert(var_id < size()); if (values) { - return FactProxy(*task, var_id, (*values)[var_id]); + return (*values)[var_id]; } else { assert(buffer); assert(state_packer); - return FactProxy(*task, var_id, state_packer->get(buffer, var_id)); + return state_packer->get(buffer, var_id); } } inline FactProxy State::operator[](VariableProxy var) const { - return (*this)[var.get_id()]; + return FactProxy(*task, var.get_id(), (*this)[var.get_id()]); } inline TaskProxy State::get_task() const { @@ -853,4 +852,50 @@ inline const std::vector &State::get_unpacked_values() const { } return *values; } + +class StateIterator { + ProxyIterator variables_iterator; + const State *state; +public: + using iterator_category = std::input_iterator_tag; + using value_type = FactProxy; + using difference_type = int; + using pointer = const value_type *; + using reference = value_type; + + StateIterator(const State &state, std::size_t pos) + : variables_iterator(state.get_task().get_variables(), pos), state(&state) { + } + + reference operator*() const { + return (*state)[*variables_iterator]; + } + + value_type operator++(int) { + value_type value(**this); + ++(*this); + return value; + } + + StateIterator &operator++() { + ++variables_iterator; + return *this; + } + + bool operator==(const StateIterator &other) const { + return variables_iterator == other.variables_iterator; + } + + bool operator!=(const StateIterator &other) const { + 
return !(*this == other); + } +}; + +inline StateIterator begin(const State &state) { + return StateIterator(state, 0); +} + +inline StateIterator end(const State &state) { + return StateIterator(state, state.size()); +} #endif From 7b5929155029f419eeff48a74864e6822d041f7c Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Thu, 10 Apr 2025 11:53:29 +0200 Subject: [PATCH 087/108] too complex attempt to solve this with concepts --- src/search/task_proxy.h | 118 ++++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 53 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 75b0827e02..45ab57b199 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -7,7 +7,6 @@ #include "task_id.h" #include "algorithms/int_packer.h" -#include "utils/collections.h" #include "utils/hash.h" #include "utils/system.h" @@ -91,30 +90,51 @@ using PackedStateBin = int_packer::IntPacker::Bin; task_properties.h module. */ +template +concept can_be_dereferenced = requires (Pos pos) { + { *pos }; +}; + +template +constexpr auto dereference_if_necessary(Pos p) { + if constexpr (can_be_dereferenced) { + return *p; + } else { + return p; + } +} + +template +concept indexable_with = requires (Container container, Pos pos) { + requires std::same_as>; + { container.size() } -> std::integral; + { container[dereference_if_necessary(pos)] } + -> std::same_as>; +}; /* Basic iterator support for proxy collections. */ -template::value>::type* = nullptr> +template + requires indexable_with class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. 
*/ const ProxyCollection *collection; - std::size_t pos; + Pos pos; public: using iterator_category = std::input_iterator_tag; - using value_type = typename ProxyCollection::ItemType; + using value_type = decltype((*collection)[0]); using difference_type = int; using pointer = const value_type *; using reference = value_type; - ProxyIterator(const ProxyCollection &collection, std::size_t pos) + ProxyIterator(const ProxyCollection &collection, Pos pos) : collection(&collection), pos(pos) { } reference operator*() const { - return (*collection)[pos]; + return (*collection)[dereference_if_necessary(pos)]; } value_type operator++(int) { @@ -138,17 +158,6 @@ class ProxyIterator { } }; -template -inline ProxyIterator begin(ProxyCollection &collection) { - return ProxyIterator(collection, 0); -} - -template -inline ProxyIterator end(ProxyCollection &collection) { - return ProxyIterator(collection, collection.size()); -} - - class FactProxy { const AbstractTask *task; FactPair fact; @@ -853,49 +862,52 @@ inline const std::vector &State::get_unpacked_values() const { return *values; } -class StateIterator { - ProxyIterator variables_iterator; - const State *state; -public: - using iterator_category = std::input_iterator_tag; - using value_type = FactProxy; - using difference_type = int; - using pointer = const value_type *; - using reference = value_type; +inline ProxyIterator> begin(const State &state); +inline ProxyIterator> end(const State &state); - StateIterator(const State &state, std::size_t pos) - : variables_iterator(state.get_task().get_variables(), pos), state(&state) { - } +template + requires (!std::same_as) +inline ProxyIterator begin(ProxyCollection &collection) { + return ProxyIterator(collection, 0); +} - reference operator*() const { - return (*state)[*variables_iterator]; - } +template + requires (!std::same_as) +inline ProxyIterator begin(const ProxyCollection &collection) { + return ProxyIterator(collection, 0); +} - value_type operator++(int) { - 
value_type value(**this); - ++(*this); - return value; - } +template +inline ProxyIterator end(ProxyCollection &collection) { + return ProxyIterator(collection, collection.size()); +} - StateIterator &operator++() { - ++variables_iterator; - return *this; - } +template +inline ProxyIterator end(const ProxyCollection &collection) { + return ProxyIterator(collection, collection.size()); +} - bool operator==(const StateIterator &other) const { - return variables_iterator == other.variables_iterator; - } +static_assert(std::input_iterator>); +static_assert(std::input_iterator>>); +inline ProxyIterator> begin(const State &state) { + ProxyIterator variables_it = begin(state.get_task().get_variables()); + return ProxyIterator>(state, variables_it); +} - bool operator!=(const StateIterator &other) const { - return !(*this == other); - } -}; +inline ProxyIterator> end(const State &state) { + ProxyIterator variables_it = end(state.get_task().get_variables()); + return ProxyIterator>(state, variables_it); +} -inline StateIterator begin(const State &state) { - return StateIterator(state, 0); +inline ProxyIterator> begin(State &state) { + ProxyIterator variables_it = begin(state.get_task().get_variables()); + return ProxyIterator>(state, variables_it); } -inline StateIterator end(const State &state) { - return StateIterator(state, state.size()); +inline ProxyIterator> end(State &state) { + ProxyIterator variables_it = end(state.get_task().get_variables()); + return ProxyIterator>(state, variables_it); } + + #endif From bb70e990fdd0f272b5f0191f4f27ee448d89d014 Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Thu, 10 Apr 2025 13:02:59 +0200 Subject: [PATCH 088/108] use template specialization --- src/search/task_proxy.h | 110 ++++++++++++++++------------------------ 1 file changed, 45 insertions(+), 65 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 45ab57b199..c6a760d6bf 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h 
@@ -7,6 +7,7 @@ #include "task_id.h" #include "algorithms/int_packer.h" +#include "utils/collections.h" #include "utils/hash.h" #include "utils/system.h" @@ -90,38 +91,22 @@ using PackedStateBin = int_packer::IntPacker::Bin; task_properties.h module. */ -template -concept can_be_dereferenced = requires (Pos pos) { - { *pos }; -}; - -template -constexpr auto dereference_if_necessary(Pos p) { - if constexpr (can_be_dereferenced) { - return *p; - } else { - return p; - } -} - -template -concept indexable_with = requires (Container container, Pos pos) { +template +concept indexable = requires (Container container, std::size_t i) { requires std::same_as>; { container.size() } -> std::integral; - { container[dereference_if_necessary(pos)] } - -> std::same_as>; + { container[i] }; }; /* Basic iterator support for proxy collections. */ -template - requires indexable_with +template class ProxyIterator { /* We store a pointer to collection instead of a reference because iterators have to be copy assignable. 
*/ const ProxyCollection *collection; - Pos pos; + std::size_t pos; public: using iterator_category = std::input_iterator_tag; using value_type = decltype((*collection)[0]); @@ -129,12 +114,12 @@ class ProxyIterator { using pointer = const value_type *; using reference = value_type; - ProxyIterator(const ProxyCollection &collection, Pos pos) + ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { } reference operator*() const { - return (*collection)[dereference_if_necessary(pos)]; + return (*collection)[pos]; } value_type operator++(int) { @@ -158,6 +143,18 @@ class ProxyIterator { } }; +template +inline ProxyIterator begin(const ProxyCollection &collection) { + return ProxyIterator(collection, 0); +} + +template +inline ProxyIterator end(const ProxyCollection &collection) { + return ProxyIterator(collection, collection.size()); +} + + + class FactProxy { const AbstractTask *task; FactPair fact; @@ -862,52 +859,35 @@ inline const std::vector &State::get_unpacked_values() const { return *values; } -inline ProxyIterator> begin(const State &state); -inline ProxyIterator> end(const State &state); - -template - requires (!std::same_as) -inline ProxyIterator begin(ProxyCollection &collection) { - return ProxyIterator(collection, 0); -} - -template - requires (!std::same_as) -inline ProxyIterator begin(const ProxyCollection &collection) { - return ProxyIterator(collection, 0); -} - -template -inline ProxyIterator end(ProxyCollection &collection) { - return ProxyIterator(collection, collection.size()); -} - -template -inline ProxyIterator end(const ProxyCollection &collection) { - return ProxyIterator(collection, collection.size()); -} +template<> +class ProxyIterator { + const State *state; + const VariablesProxy variables; + int var_id; +public: + ProxyIterator(const State &state, int var_id) + : state(&state), variables(state.get_task().get_variables()), var_id(var_id) { + } -static_assert(std::input_iterator>); 
-static_assert(std::input_iterator>>); -inline ProxyIterator> begin(const State &state) { - ProxyIterator variables_it = begin(state.get_task().get_variables()); - return ProxyIterator>(state, variables_it); -} + FactProxy operator*() const { + return (*state)[variables[var_id]]; + } -inline ProxyIterator> end(const State &state) { - ProxyIterator variables_it = end(state.get_task().get_variables()); - return ProxyIterator>(state, variables_it); -} + ProxyIterator &operator++() { + assert(var_id < variables.size()); + ++var_id; + return *this; + } -inline ProxyIterator> begin(State &state) { - ProxyIterator variables_it = begin(state.get_task().get_variables()); - return ProxyIterator>(state, variables_it); -} + bool operator==(const ProxyIterator &other) const { + assert(state == other.state); + return var_id == other.var_id; + } -inline ProxyIterator> end(State &state) { - ProxyIterator variables_it = end(state.get_task().get_variables()); - return ProxyIterator>(state, variables_it); -} + bool operator!=(const ProxyIterator &other) const { + return !(*this == other); + } +}; #endif From 43a79294ac3d6d17061aee812fc3cd7844da79c0 Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Thu, 10 Apr 2025 14:21:21 +0200 Subject: [PATCH 089/108] fix style --- src/search/task_proxy.h | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index c6a760d6bf..342ac577bd 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -92,10 +92,15 @@ using PackedStateBin = int_packer::IntPacker::Bin; */ template -concept indexable = requires (Container container, std::size_t i) { +concept indexable = requires(Container container, std::size_t i) { requires std::same_as>; - { container.size() } -> std::integral; - { container[i] }; + { + container.size() + } + ->std::integral; + { + container[i] + } }; /* @@ -143,12 +148,12 @@ class ProxyIterator { } }; -template +template inline ProxyIterator 
begin(const ProxyCollection &collection) { return ProxyIterator(collection, 0); } -template +template inline ProxyIterator end(const ProxyCollection &collection) { return ProxyIterator(collection, collection.size()); } From 56e58ede27345c06e6574ae1b928774b5ebda50c Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Thu, 10 Apr 2025 14:37:13 +0200 Subject: [PATCH 090/108] remove item type --- src/search/task_proxy.h | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 342ac577bd..a86a314097 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -100,7 +100,7 @@ concept indexable = requires(Container container, std::size_t i) { ->std::integral; { container[i] - } + }; }; /* @@ -263,7 +263,6 @@ class ConditionsProxy { protected: const AbstractTask *task; public: - using ItemType = FactProxy; explicit ConditionsProxy(const AbstractTask &task) : task(&task) {} virtual ~ConditionsProxy() = default; @@ -337,7 +336,6 @@ class VariableProxy { class VariablesProxy { const AbstractTask *task; public: - using ItemType = VariableProxy; explicit VariablesProxy(const AbstractTask &task) : task(&task) {} ~VariablesProxy() = default; @@ -425,7 +423,6 @@ class EffectsProxy { int op_index; bool is_axiom; public: - using ItemType = EffectProxy; EffectsProxy(const AbstractTask &task, int op_index, bool is_axiom) : task(&task), op_index(op_index), is_axiom(is_axiom) {} ~EffectsProxy() = default; @@ -498,7 +495,6 @@ class OperatorProxy { class OperatorsProxy { const AbstractTask *task; public: - using ItemType = OperatorProxy; explicit OperatorsProxy(const AbstractTask &task) : task(&task) {} ~OperatorsProxy() = default; @@ -525,7 +521,6 @@ class OperatorsProxy { class AxiomsProxy { const AbstractTask *task; public: - using ItemType = OperatorProxy; explicit AxiomsProxy(const AbstractTask &task) : task(&task) {} ~AxiomsProxy() = default; From aa850acc930b7780fb61464692f36f3ff291ebc1 Mon Sep 17 
00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 12:06:00 +0200 Subject: [PATCH 091/108] change uncrustify config to accept the concept definition --- .uncrustify.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.uncrustify.cfg b/.uncrustify.cfg index ab316995b7..9bb2f29c10 100644 --- a/.uncrustify.cfg +++ b/.uncrustify.cfg @@ -98,7 +98,7 @@ eat_blanks_after_open_brace=true eat_blanks_before_close_brace=true mod_pawn_semicolon=false mod_full_paren_if_bool=false -mod_remove_extra_semicolon=true +mod_remove_extra_semicolon=false mod_sort_import=false mod_sort_using=false mod_sort_include=false From 21696c95493c52edb840eb7e1e8041cc60401d7c Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 13:22:33 +0200 Subject: [PATCH 092/108] fix concept definition --- src/search/task_proxy.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index a86a314097..28cea153a1 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -92,7 +92,7 @@ using PackedStateBin = int_packer::IntPacker::Bin; */ template -concept indexable = requires(Container container, std::size_t i) { +concept indexable = requires(Container &container, std::size_t i) { requires std::same_as>; { container.size() From 26267ad45b1f40dd45d8b3f27ac767cd84bf678b Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 13:25:33 +0200 Subject: [PATCH 093/108] 'fix' style --- src/search/task_proxy.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 28cea153a1..6319ce404d 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -92,7 +92,7 @@ using PackedStateBin = int_packer::IntPacker::Bin; */ template -concept indexable = requires(Container &container, std::size_t i) { +concept indexable = requires(Container & container, std::size_t i) { requires std::same_as>; { container.size() From 
c3fe9852752fa8604d1a895c9b50e199e7996a6e Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 18:58:10 +0200 Subject: [PATCH 094/108] turn iterator classes into true iterators --- src/search/task_proxy.h | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 6319ce404d..7a67f03931 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -113,17 +113,14 @@ class ProxyIterator { const ProxyCollection *collection; std::size_t pos; public: - using iterator_category = std::input_iterator_tag; using value_type = decltype((*collection)[0]); using difference_type = int; - using pointer = const value_type *; - using reference = value_type; ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { } - reference operator*() const { + value_type operator*() const { return (*collection)[pos]; } @@ -202,11 +199,13 @@ class FactsProxyIterator { int var_id; int value; public: + using value_type = FactProxy; + using difference_type = int; + FactsProxyIterator(const AbstractTask &task, int var_id, int value) : task(&task), var_id(var_id), value(value) {} - ~FactsProxyIterator() = default; - FactProxy operator*() const { + value_type operator*() const { return FactProxy(*task, var_id, value); } @@ -222,6 +221,12 @@ class FactsProxyIterator { return *this; } + value_type operator++(int) { + value_type fact(**this); + ++(*this); + return fact; + } + bool operator==(const FactsProxyIterator &other) const { assert(task == other.task); return var_id == other.var_id && value == other.value; @@ -862,14 +867,17 @@ inline const std::vector &State::get_unpacked_values() const { template<> class ProxyIterator { const State *state; - const VariablesProxy variables; + VariablesProxy variables; int var_id; public: + using difference_type = int; + using value_type = FactProxy; + ProxyIterator(const State &state, 
int var_id) : state(&state), variables(state.get_task().get_variables()), var_id(var_id) { } - FactProxy operator*() const { + value_type operator*() const { return (*state)[variables[var_id]]; } @@ -879,6 +887,12 @@ class ProxyIterator { return *this; } + value_type operator++(int) { + value_type fact(**this); + ++(*this); + return fact; + } + bool operator==(const ProxyIterator &other) const { assert(state == other.state); return var_id == other.var_id; @@ -889,5 +903,16 @@ class ProxyIterator { } }; +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); +static_assert(std::input_iterator>); + #endif From 1fee3446c6e0d77efb59693df12d663cf9ab06a9 Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 20:13:10 +0200 Subject: [PATCH 095/108] C++-20 supports default implementations of == and !=. 
--- src/search/task_proxy.h | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 7a67f03931..8553c46901 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -135,14 +135,7 @@ class ProxyIterator { return *this; } - bool operator==(const ProxyIterator &other) const { - assert(collection == other.collection); - return pos == other.pos; - } - - bool operator!=(const ProxyIterator &other) const { - return !(*this == other); - } + bool operator==(const ProxyIterator &other) const = default; }; template @@ -227,14 +220,7 @@ class FactsProxyIterator { return fact; } - bool operator==(const FactsProxyIterator &other) const { - assert(task == other.task); - return var_id == other.var_id && value == other.value; - } - - bool operator!=(const FactsProxyIterator &other) const { - return !(*this == other); - } + bool operator==(const FactsProxyIterator &other) const = default; }; @@ -897,10 +883,6 @@ class ProxyIterator { assert(state == other.state); return var_id == other.var_id; } - - bool operator!=(const ProxyIterator &other) const { - return !(*this == other); - } }; static_assert(std::input_iterator>); From 96a31adc2144a1540b5012e58423be2a10467ebd Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sat, 12 Apr 2025 20:29:54 +0200 Subject: [PATCH 096/108] use nested iterator in state iterator --- src/search/task_proxy.h | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 8553c46901..4082c23265 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -853,23 +853,21 @@ inline const std::vector &State::get_unpacked_values() const { template<> class ProxyIterator { const State *state; - VariablesProxy variables; - int var_id; + ProxyIterator pos; public: using difference_type = int; using value_type = FactProxy; ProxyIterator(const State &state, int var_id) - : 
state(&state), variables(state.get_task().get_variables()), var_id(var_id) { + : state(&state), pos(state.get_task().get_variables(), var_id) { } value_type operator*() const { - return (*state)[variables[var_id]]; + return (*state)[*pos]; } ProxyIterator &operator++() { - assert(var_id < variables.size()); - ++var_id; + ++pos; return *this; } @@ -879,10 +877,7 @@ class ProxyIterator { return fact; } - bool operator==(const ProxyIterator &other) const { - assert(state == other.state); - return var_id == other.var_id; - } + bool operator==(const ProxyIterator &other) const = default; }; static_assert(std::input_iterator>); From 17570ed0efaa8f21e9fd1e8394d519c8f480ea50 Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Sun, 13 Apr 2025 00:40:51 +0200 Subject: [PATCH 097/108] avoid stale reference --- src/search/task_proxy.h | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 4082c23265..45a3dc25e7 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -599,6 +599,7 @@ class State { std::size_t size() const; int operator[](std::size_t var_id) const; FactProxy operator[](VariableProxy var) const; + FactProxy get_fact(std::size_t var_id) const; TaskProxy get_task() const; @@ -809,7 +810,11 @@ inline int State::operator[](std::size_t var_id) const { } inline FactProxy State::operator[](VariableProxy var) const { - return FactProxy(*task, var.get_id(), (*this)[var.get_id()]); + return get_fact(var.get_id()); +} + +inline FactProxy State::get_fact(std::size_t var_id) const { + return FactProxy(*task, var_id, (*this)[var_id]); } inline TaskProxy State::get_task() const { @@ -853,21 +858,21 @@ inline const std::vector &State::get_unpacked_values() const { template<> class ProxyIterator { const State *state; - ProxyIterator pos; + int var_id; public: using difference_type = int; using value_type = FactProxy; ProxyIterator(const State &state, int var_id) - : 
state(&state), pos(state.get_task().get_variables(), var_id) { + : state(&state), var_id(var_id) { } value_type operator*() const { - return (*state)[*pos]; + return state->get_fact(var_id); } ProxyIterator &operator++() { - ++pos; + ++var_id; return *this; } From ecd7e2e2c1852fbd0f6cb7da199f46babeaafd9d Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Mon, 14 Apr 2025 18:44:22 +0200 Subject: [PATCH 098/108] try reintroducing reference and pointer typedefs for MSVC. --- src/search/task_proxy.h | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 45a3dc25e7..14dbd53388 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -114,7 +114,9 @@ class ProxyIterator { std::size_t pos; public: using value_type = decltype((*collection)[0]); - using difference_type = int; + using difference_type = int; // unused but required by the iterator concept + using reference = value_type; // unused but required by older MSVC versions + using pointer = value_type*; // unused but required by older MSVC versions ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { @@ -193,7 +195,9 @@ class FactsProxyIterator { int value; public: using value_type = FactProxy; - using difference_type = int; + using difference_type = int; // unused but required by the iterator concept + using reference = FactProxy; // unused but required by older MSVC versions + using pointer = FactProxy*; // unused but required by older MSVC versions FactsProxyIterator(const AbstractTask &task, int var_id, int value) : task(&task), var_id(var_id), value(value) {} @@ -860,8 +864,10 @@ class ProxyIterator { const State *state; int var_id; public: - using difference_type = int; + using difference_type = int; // unused but required by the iterator concept using value_type = FactProxy; + using reference = FactProxy; // unused but required by older MSVC versions + using 
pointer = FactProxy*; // unused but required by older MSVC versions ProxyIterator(const State &state, int var_id) : state(&state), var_id(var_id) { From 557be03a5e2fb32418838580d6f1138cd0833f56 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 22 Apr 2025 13:25:11 +0200 Subject: [PATCH 099/108] Split apart computation of conditional noops. --- src/search/landmarks/landmark_factory_h_m.cc | 88 +++++++++++--------- src/search/landmarks/landmark_factory_h_m.h | 5 ++ 2 files changed, 53 insertions(+), 40 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index dc551f6051..f38ff140f5 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -162,7 +162,7 @@ void LandmarkFactoryHM::get_split_m_sets( subsets.push_back(current); } return; - } + } if (current_index1 != superset1_size && (current_index2 == superset2_size || @@ -174,20 +174,20 @@ void LandmarkFactoryHM::get_split_m_sets( get_split_m_sets( variables, num_included1, num_included2, current_index1 + 1, current_index2, current, subsets, superset1, superset2); - } else { - /* - Switching order of 1 and 2 here to avoid code duplication in the form - of a function `get_split_m_sets_including_current_proposition_from_second` - analogous to `get_split_m_sets_including_current_proposition_from_first`. - */ - get_split_m_sets_including_current_proposition_from_first( - variables, num_included2, num_included1, current_index2, - current_index1, current, subsets, superset2, superset1); - // Do not include proposition at `current_index2` in set. 
- get_split_m_sets( - variables, num_included1, num_included2, current_index1, - current_index2 + 1, current, subsets, superset1, superset2); - } + } else { + /* + Switching order of 1 and 2 here to avoid code duplication in the form + of a function `get_split_m_sets_including_current_proposition_from_second` + analogous to `get_split_m_sets_including_current_proposition_from_first`. + */ + get_split_m_sets_including_current_proposition_from_first( + variables, num_included2, num_included1, current_index2, + current_index1, current, subsets, superset2, superset1); + // Do not include proposition at `current_index2` in set. + get_split_m_sets( + variables, num_included1, num_included2, current_index1, + current_index2 + 1, current, subsets, superset1, superset2); + } } // Get partial assignments of size <= m in the problem. @@ -472,29 +472,11 @@ Propositions LandmarkFactoryHM::initialize_postconditions( return postcondition; } -void LandmarkFactoryHM::add_conditional_noop( - PiMOperator &pm_op, int op_id, - const VariablesProxy &variables, const Propositions &propositions, - const Propositions &preconditions, const Propositions &postconditions) { - int noop_index = static_cast(pm_op.conditional_noops.size()); - - /* - Get the subsets that have >= 1 element in the precondition (unless - the precondition is empty) or the postcondition and >= 1 element - in the `propositions` set. - */ - vector noop_precondition_subsets = - get_split_m_sets(variables, preconditions, propositions); - vector noop_postconditions_subsets = - get_split_m_sets(variables, postconditions, propositions); - - num_unsatisfied_preconditions[op_id].second.push_back( - static_cast(noop_precondition_subsets.size())); - - // Compute the conditional noop preconditions. 
+vector LandmarkFactoryHM::compute_noop_precondition( + const vector &preconditions, int op_id, int noop_index) { vector noop_condition; - noop_condition.reserve(noop_precondition_subsets.size()); - for (const auto &subset : noop_precondition_subsets) { + noop_condition.reserve(preconditions.size()); + for (const auto &subset : preconditions) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; @@ -502,17 +484,43 @@ void LandmarkFactoryHM::add_conditional_noop( // These propositions are "conditional preconditions" for this operator. hm_table[set_index].triggered_operators.emplace_back(op_id, noop_index); } + return noop_condition; +} - // Compute the conditional noop effects. +vector LandmarkFactoryHM::compute_noop_effect( + const vector &postconditions) { vector noop_effect; - noop_effect.reserve(noop_postconditions_subsets.size()); - for (const auto &subset : noop_postconditions_subsets) { + noop_effect.reserve(postconditions.size()); + for (const auto &subset: postconditions) { assert(static_cast(subset.size()) <= m); assert(set_indices.contains(subset)); int set_index = set_indices[subset]; noop_effect.push_back(set_index); } + return noop_effect; +} + +void LandmarkFactoryHM::add_conditional_noop( + PiMOperator &pm_op, int op_id, + const VariablesProxy &variables, const Propositions &propositions, + const Propositions &preconditions, const Propositions &postconditions) { + int noop_index = static_cast(pm_op.conditional_noops.size()); + /* + Get the subsets that have >= 1 element in the precondition (unless + the precondition is empty) or the postcondition and >= 1 element + in the `propositions` set. 
+ */ + vector noop_preconditions_subsets = + get_split_m_sets(variables, preconditions, propositions); + vector noop_postconditions_subsets = + get_split_m_sets(variables, postconditions, propositions); + + num_unsatisfied_preconditions[op_id].second.push_back( + static_cast(noop_preconditions_subsets.size())); + vector noop_condition = compute_noop_precondition( + noop_preconditions_subsets, op_id, noop_index); + vector noop_effect = compute_noop_effect(noop_postconditions_subsets); pm_op.conditional_noops.emplace_back( move(noop_condition), move(noop_effect)); } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 8781eb2c18..fd00148872 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -141,6 +141,11 @@ class LandmarkFactoryHM : public LandmarkFactory { Propositions initialize_postconditions( const VariablesProxy &variables, const OperatorProxy &op, PiMOperator &pm_op); + std::vector compute_noop_precondition( + const std::vector &preconditions, int op_id, + int noop_index); + std::vector compute_noop_effect( + const std::vector &postconditions); void add_conditional_noop( PiMOperator &pm_op, int op_id, const VariablesProxy &variables, const Propositions &propositions, From ffce23ec5742cf044a6c051fa459e30cda519ae8 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 22 Apr 2025 13:30:54 +0200 Subject: [PATCH 100/108] Fix style. 
--- src/search/landmarks/landmark_factory_h_m.cc | 30 ++++++++++---------- src/search/landmarks/landmark_factory_h_m.h | 2 +- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f38ff140f5..14d4ffc503 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -162,7 +162,7 @@ void LandmarkFactoryHM::get_split_m_sets( subsets.push_back(current); } return; - } + } if (current_index1 != superset1_size && (current_index2 == superset2_size || @@ -174,20 +174,20 @@ void LandmarkFactoryHM::get_split_m_sets( get_split_m_sets( variables, num_included1, num_included2, current_index1 + 1, current_index2, current, subsets, superset1, superset2); - } else { - /* - Switching order of 1 and 2 here to avoid code duplication in the form - of a function `get_split_m_sets_including_current_proposition_from_second` - analogous to `get_split_m_sets_including_current_proposition_from_first`. - */ - get_split_m_sets_including_current_proposition_from_first( - variables, num_included2, num_included1, current_index2, - current_index1, current, subsets, superset2, superset1); - // Do not include proposition at `current_index2` in set. - get_split_m_sets( - variables, num_included1, num_included2, current_index1, - current_index2 + 1, current, subsets, superset1, superset2); - } + } else { + /* + Switching order of 1 and 2 here to avoid code duplication in the form + of a function `get_split_m_sets_including_current_proposition_from_second` + analogous to `get_split_m_sets_including_current_proposition_from_first`. + */ + get_split_m_sets_including_current_proposition_from_first( + variables, num_included2, num_included1, current_index2, + current_index1, current, subsets, superset2, superset1); + // Do not include proposition at `current_index2` in set. 
+ get_split_m_sets( + variables, num_included1, num_included2, current_index1, + current_index2 + 1, current, subsets, superset1, superset2); + } } // Get partial assignments of size <= m in the problem. diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index fd00148872..877229f320 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -103,7 +103,7 @@ class LandmarkFactoryHM : public LandmarkFactory { const State &state, const VariablesProxy &variables); void collect_condition_landmarks( const std::vector &condition, - std::vector &landmarks) const; + std::vector &landmarks) const; void initialize_proposition_landmark( int op_id, HMEntry &hm_entry, const std::vector &landmarks, const std::vector &precondition_landmarks, From bb2519549f5f26a191ca66169276ef1c0e3ab168 Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Tue, 22 Apr 2025 15:53:47 +0200 Subject: [PATCH 101/108] use windows 2025 instead of 2019 --- .github/workflows/windows.yml | 4 ++-- src/search/task_proxy.h | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 7c631a8970..5a87339cba 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -26,8 +26,8 @@ jobs: strategy: matrix: platform: - - {os: "windows-2022", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} - - {os: "windows-2019", vc: "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} + - {os: "windows-2022", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} + - {os: "windows-2025", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} python-version: [3.8] steps: - name: Clone repository diff --git 
a/src/search/task_proxy.h b/src/search/task_proxy.h index 14dbd53388..58c2036965 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -115,8 +115,6 @@ class ProxyIterator { public: using value_type = decltype((*collection)[0]); using difference_type = int; // unused but required by the iterator concept - using reference = value_type; // unused but required by older MSVC versions - using pointer = value_type*; // unused but required by older MSVC versions ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { @@ -196,8 +194,6 @@ class FactsProxyIterator { public: using value_type = FactProxy; using difference_type = int; // unused but required by the iterator concept - using reference = FactProxy; // unused but required by older MSVC versions - using pointer = FactProxy*; // unused but required by older MSVC versions FactsProxyIterator(const AbstractTask &task, int var_id, int value) : task(&task), var_id(var_id), value(value) {} @@ -866,8 +862,6 @@ class ProxyIterator { public: using difference_type = int; // unused but required by the iterator concept using value_type = FactProxy; - using reference = FactProxy; // unused but required by older MSVC versions - using pointer = FactProxy*; // unused but required by older MSVC versions ProxyIterator(const State &state, int var_id) : state(&state), var_id(var_id) { From 57012472bf7cc795a54476d5b44d20331034a9c6 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Tue, 22 Apr 2025 17:42:51 +0200 Subject: [PATCH 102/108] Change parameter name to match command line. 
--- src/search/landmarks/landmark_sum_heuristic.cc | 7 +++---- src/search/landmarks/landmark_sum_heuristic.h | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/search/landmarks/landmark_sum_heuristic.cc b/src/search/landmarks/landmark_sum_heuristic.cc index 2d8dd4d5fc..f8fa63e91c 100644 --- a/src/search/landmarks/landmark_sum_heuristic.cc +++ b/src/search/landmarks/landmark_sum_heuristic.cc @@ -31,7 +31,7 @@ static bool are_dead_ends_reliable( } LandmarkSumHeuristic::LandmarkSumHeuristic( - const shared_ptr &landmark_factory, + const shared_ptr &lm_factory, bool pref, bool prog_goal, bool prog_gn, bool prog_r, const shared_ptr &transform, bool cache_estimates, const string &description, utils::Verbosity verbosity, @@ -40,12 +40,11 @@ LandmarkSumHeuristic::LandmarkSumHeuristic( pref, tasks::get_default_value_axioms_task_if_needed(transform, axioms), cache_estimates, description, verbosity), - dead_ends_reliable( - are_dead_ends_reliable(landmark_factory, task_proxy)) { + dead_ends_reliable(are_dead_ends_reliable(lm_factory, task_proxy)) { if (log.is_at_least_normal()) { log << "Initializing landmark sum heuristic..." 
<< endl; } - initialize(landmark_factory, prog_goal, prog_gn, prog_r); + initialize(lm_factory, prog_goal, prog_gn, prog_r); compute_landmark_costs(); } diff --git a/src/search/landmarks/landmark_sum_heuristic.h b/src/search/landmarks/landmark_sum_heuristic.h index 625a746e7b..a3b1eef367 100644 --- a/src/search/landmarks/landmark_sum_heuristic.h +++ b/src/search/landmarks/landmark_sum_heuristic.h @@ -23,7 +23,7 @@ class LandmarkSumHeuristic : public LandmarkHeuristic { int get_heuristic_value(const State &ancestor_state) override; public: LandmarkSumHeuristic( - const std::shared_ptr &landmark_factory, bool pref, + const std::shared_ptr &lm_factory, bool pref, bool prog_goal, bool prog_gn, bool prog_r, const std::shared_ptr &transform, bool cache_estimates, const std::string &description, From f6c4b30761fde1668776e64cfeaad8b0004fe54d Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 2 Apr 2025 09:40:03 +0200 Subject: [PATCH 103/108] Represent landmark type using Enum. --- src/search/landmarks/landmark.cc | 4 +-- src/search/landmarks/landmark.h | 27 ++++++++++--------- src/search/landmarks/landmark_factory_h_m.cc | 6 ++--- .../landmarks/landmark_factory_merged.cc | 10 +++---- .../landmark_factory_reasonable_orders_hps.cc | 17 ++++++------ .../landmarks/landmark_factory_rpg_exhaust.cc | 4 +-- .../landmarks/landmark_factory_rpg_sasp.cc | 18 ++++++------- .../landmarks/landmark_factory_zhu_givan.cc | 6 ++--- src/search/landmarks/landmark_graph.cc | 26 +++++++++++------- src/search/landmarks/landmark_heuristic.cc | 4 +-- src/search/landmarks/util.cc | 2 +- 11 files changed, 67 insertions(+), 57 deletions(-) diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 77766659c1..018e3ea66d 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -7,11 +7,11 @@ bool Landmark::is_true_in_state(const State &state) const { auto is_atom_true_in_state = [&](const FactPair &atom) { return 
state[atom.var].get_value() == atom.value; }; - if (is_disjunctive) { + if (type == DISJUNCTIVE) { return ranges::any_of( atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } else { - // Is conjunctive or simple. + assert(type == CONJUNCTIVE || type == SIMPLE); return ranges::all_of( atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } diff --git a/src/search/landmarks/landmark.h b/src/search/landmarks/landmark.h index ee1b324088..aa4ff7ea84 100644 --- a/src/search/landmarks/landmark.h +++ b/src/search/landmarks/landmark.h @@ -6,17 +6,20 @@ #include namespace landmarks { +enum LandmarkType { + DISJUNCTIVE, + SIMPLE, + CONJUNCTIVE, +}; class Landmark { public: - Landmark(std::vector _atoms, bool is_disjunctive, - bool is_conjunctive, bool is_true_in_goal = false, - bool is_derived = false) - : atoms(move(_atoms)), is_disjunctive(is_disjunctive), - is_conjunctive(is_conjunctive), is_true_in_goal(is_true_in_goal), - is_derived(is_derived) { - assert(!(is_conjunctive && is_disjunctive)); - assert((is_conjunctive && atoms.size() > 1) || - (is_disjunctive && atoms.size() > 1) || atoms.size() == 1); + Landmark(std::vector _atoms, LandmarkType type, + bool is_true_in_goal = false, bool is_derived = false) + : atoms(move(_atoms)), type(type), + is_true_in_goal(is_true_in_goal), is_derived(is_derived) { + assert((type == DISJUNCTIVE && atoms.size() > 1) || + (type == CONJUNCTIVE && atoms.size() > 1) || + (type == SIMPLE && atoms.size() == 1)); } bool operator==(const Landmark &other) const { @@ -27,9 +30,9 @@ class Landmark { return !(*this == other); } - std::vector atoms; - const bool is_disjunctive; - const bool is_conjunctive; + const std::vector atoms; + const LandmarkType type; + bool is_conjunctive; bool is_true_in_goal; bool is_derived; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 14d4ffc503..0ffcbdd1b3 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ 
b/src/search/landmarks/landmark_factory_h_m.cc @@ -626,7 +626,7 @@ void LandmarkFactoryHM::discard_conjunctive_landmarks() { } landmark_graph->remove_node_if( [](const LandmarkNode &node) { - return node.get_landmark().is_conjunctive; + return node.get_landmark().type == CONJUNCTIVE; }); } @@ -926,8 +926,8 @@ void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { vector facts(hm_entry.propositions); utils::sort_unique(facts); assert(!facts.empty()); - bool conjunctive = facts.size() > 1; - Landmark landmark(move(facts), false, conjunctive, goal); + LandmarkType type = facts.size() == 1 ? SIMPLE : CONJUNCTIVE; + Landmark landmark(move(facts), type, goal); landmark.first_achievers.insert(hm_entry.first_achievers.begin(), hm_entry.first_achievers.end()); landmark_nodes[set_index] = diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 6749da9dbf..390c716b30 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -26,7 +26,7 @@ LandmarkFactoryMerged::LandmarkFactoryMerged( LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( const Landmark &landmark) const { - if (landmark.is_disjunctive) { + if (landmark.type == DISJUNCTIVE) { utils::HashSet atoms( landmark.atoms.begin(), landmark.atoms.end()); if (landmark_graph->contains_superset_disjunctive_landmark(atoms)) { @@ -36,7 +36,7 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( return nullptr; } - if (landmark.is_conjunctive) { + if (landmark.type == CONJUNCTIVE) { cerr << "Don't know how to handle conjunctive landmarks yet..." << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } @@ -71,12 +71,12 @@ void LandmarkFactoryMerged::add_simple_landmarks( // TODO: Loop over landmarks instead. 
for (const auto &node : *graph_to_merge) { const Landmark &landmark = node->get_landmark(); - if (landmark.is_conjunctive) { + if (landmark.type == CONJUNCTIVE) { cerr << "Don't know how to handle conjunctive landmarks yet" << endl; utils::exit_with(ExitCode::SEARCH_UNSUPPORTED); } - if (landmark.is_disjunctive) { + if (landmark.type == DISJUNCTIVE) { continue; } assert(landmark.atoms.size() == 1); @@ -96,7 +96,7 @@ void LandmarkFactoryMerged::add_disjunctive_landmarks( for (const shared_ptr &graph_to_merge : landmark_graphs) { for (const auto &node : *graph_to_merge) { const Landmark &landmark = node->get_landmark(); - if (landmark.is_disjunctive) { + if (landmark.type == DISJUNCTIVE) { /* TODO: It seems that disjunctive landmarks are only added if none of the atoms it is made of is also there as a simple diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index acf052efd2..4824d16b95 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -45,7 +45,7 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_goal_orderings( assert(landmark.is_true_in_goal); for (const auto &other : *landmark_graph) { const Landmark &other_landmark = other->get_landmark(); - if (landmark == other_landmark || other_landmark.is_disjunctive) { + if (landmark == other_landmark || other_landmark.type == DISJUNCTIVE) { continue; } if (interferes(task_proxy, other_landmark, landmark)) { @@ -72,7 +72,7 @@ static unordered_set collect_reasonable_ordering_candidates( if (type >= OrderingType::GREEDY_NECESSARY) { // Found a landmark such that `node` ->_gn `child`. 
for (const auto &[parent, parent_type]: child->parents) { - if (parent->get_landmark().is_disjunctive) { + if (parent->get_landmark().type == DISJUNCTIVE) { continue; } if (parent_type >= OrderingType::NATURAL && *parent != node) { @@ -95,7 +95,7 @@ void LandmarkFactoryReasonableOrdersHPS::insert_reasonable_orderings( LandmarkNode &node, const Landmark &landmark) const { for (LandmarkNode *other : candidates) { const Landmark &other_landmark = other->get_landmark(); - if (landmark == other_landmark || other_landmark.is_disjunctive) { + if (landmark == other_landmark || other_landmark.type == DISJUNCTIVE) { continue; } if (interferes(task_proxy, other_landmark, landmark)) { @@ -128,7 +128,7 @@ void LandmarkFactoryReasonableOrdersHPS::approximate_reasonable_orderings( State initial_state = task_proxy.get_initial_state(); for (const auto &node : *landmark_graph) { const Landmark &landmark = node->get_landmark(); - if (landmark.is_disjunctive) { + if (landmark.type == DISJUNCTIVE) { continue; } @@ -328,7 +328,7 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( Skip this case for conjunctive landmarks A, as they are typically achieved through a sequence of operators successively adding the parts of A. 
    */
-    if (landmark_a.is_conjunctive) {
+    if (landmark_a.type == CONJUNCTIVE) {
         return false;
     }
     utils::HashSet shared_effects =
@@ -365,8 +365,8 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes(
     const TaskProxy &task_proxy, const Landmark &landmark_a,
     const Landmark &landmark_b) const {
     assert(landmark_a != landmark_b);
-    assert(!landmark_a.is_disjunctive);
-    assert(!landmark_b.is_disjunctive);
+    assert(landmark_a.type != DISJUNCTIVE);
+    assert(landmark_b.type != DISJUNCTIVE);
     VariablesProxy variables = task_proxy.get_variables();
 
     for (const FactPair &atom_b : landmark_b.atoms) {
@@ -374,7 +374,8 @@
         for (const FactPair &atom_a : landmark_a.atoms) {
             FactProxy a = variables[atom_a.var].get_fact(atom_a.value);
             if (atom_a == atom_b) {
-                if (landmark_a.is_conjunctive && landmark_b.is_conjunctive) {
+                if (landmark_a.type == CONJUNCTIVE &&
+                    landmark_b.type == CONJUNCTIVE) {
                     continue;
                 }
                 return false;
diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc
index d237ba2a76..a8ea64628d 100644
--- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc
+++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc
@@ -44,7 +44,7 @@ static bool relaxed_task_solvable(
 void LandmarkFactoryRpgExhaust::generate_goal_landmarks(
     const TaskProxy &task_proxy) const {
     for (FactProxy goal : task_proxy.get_goals()) {
-        Landmark landmark({goal.get_pair()}, false, false, true);
+        Landmark landmark({goal.get_pair()}, SIMPLE, true);
         landmark_graph->add_landmark(move(landmark));
     }
 }
@@ -55,7 +55,7 @@ void LandmarkFactoryRpgExhaust::generate_all_simple_landmarks(
     for (int value = 0; value < var.get_domain_size(); ++value) {
         const FactPair atom(var.get_id(), value);
         if (!landmark_graph->contains_simple_landmark(atom)) {
-            Landmark landmark({atom}, false, false);
+            Landmark landmark({atom}, SIMPLE);
             if (!relaxed_task_solvable(task_proxy, exploration, landmark,
use_unary_relaxation)) { landmark_graph->add_landmark(move(landmark)); diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 9ff1485280..3d8ef839a9 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -228,9 +228,7 @@ void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( those incoming orderings with natural orderings. */ const Landmark &landmark = simple_landmark_node.get_landmark(); - assert(!landmark.is_conjunctive); - assert(!landmark.is_disjunctive); - assert(landmark.atoms.size() == 1); + assert(landmark.type == SIMPLE); LandmarkNode *disjunctive_landmark_node = &landmark_graph->get_disjunctive_landmark_node(landmark.atoms[0]); remove_occurrences_of_landmark_node(disjunctive_landmark_node); @@ -257,7 +255,7 @@ void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( return; } - Landmark landmark({atom}, false, false); + Landmark landmark({atom}, SIMPLE); LandmarkNode &simple_landmark_node = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&simple_landmark_node); @@ -306,8 +304,8 @@ void LandmarkFactoryRpgSasp::add_disjunctive_landmark_and_ordering( /* Only add the landmark to the landmark graph if it does not overlap with an existing landmark. 
*/ if (!overlaps) { - Landmark landmark(vector(atoms.begin(), atoms.end()), - true, false); + Landmark landmark( + vector(atoms.begin(), atoms.end()), DISJUNCTIVE); LandmarkNode *new_landmark_node = &landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(new_landmark_node); @@ -505,7 +503,7 @@ vector> LandmarkFactoryRpgSasp::compute_disjunctive_pre void LandmarkFactoryRpgSasp::generate_goal_landmarks( const TaskProxy &task_proxy) { for (FactProxy goal : task_proxy.get_goals()) { - Landmark landmark({goal.get_pair()}, false, false, true); + Landmark landmark({goal.get_pair()}, SIMPLE, true); LandmarkNode &node = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&node); } @@ -655,7 +653,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( const Landmark &landmark = node->get_landmark(); forward_orderings[node] = compute_atoms_unreachable_without_landmark( variables, landmark, reached); - if (landmark.is_disjunctive || landmark.is_conjunctive) { + if (landmark.type != SIMPLE) { return; } assert(landmark.atoms.size() == 1); @@ -674,7 +672,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( bool LandmarkFactoryRpgSasp::atom_and_landmark_achievable_together( const FactPair &atom, const Landmark &landmark) const { - assert(!landmark.is_conjunctive); + assert(landmark.type != CONJUNCTIVE); for (const FactPair &landmark_atom : landmark.atoms) { if (atom == landmark_atom) { return true; @@ -747,7 +745,7 @@ void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() const { } landmark_graph->remove_node_if( [](const LandmarkNode &node) { - return node.get_landmark().is_disjunctive; + return node.get_landmark().type == DISJUNCTIVE; }); } } diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 77e09ada33..755e8ec7a6 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -46,7 
+46,7 @@ bool LandmarkFactoryZhuGivan::goal_is_reachable( if (log.is_at_least_normal()) { log << "Problem not solvable, even if relaxed." << endl; } - Landmark landmark({goal.get_pair()}, false, false, true); + Landmark landmark({goal.get_pair()}, SIMPLE, true); landmark_graph->add_landmark(move(landmark)); return false; } @@ -61,7 +61,7 @@ LandmarkNode *LandmarkFactoryZhuGivan::create_goal_landmark( node = &landmark_graph->get_simple_landmark_node(goal); node->get_landmark().is_true_in_goal = true; } else { - Landmark landmark({goal}, false, false, true); + Landmark landmark({goal}, SIMPLE, true); node = &landmark_graph->add_landmark(move(landmark)); } return node; @@ -83,7 +83,7 @@ void LandmarkFactoryZhuGivan::extract_landmarks_and_orderings_from_goal_labels( if (landmark_graph->contains_simple_landmark(atom)) { node = &landmark_graph->get_simple_landmark_node(atom); } else { - Landmark landmark({atom}, false, false); + Landmark landmark({atom}, SIMPLE); node = &landmark_graph->add_landmark(move(landmark)); } if (use_orders) { diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index c4b5519b85..67a5d566f7 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -91,7 +91,7 @@ LandmarkNode *LandmarkGraph::add_node(Landmark &&landmark) { } LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { - assert(landmark_to_add.is_conjunctive || all_of( + assert(landmark_to_add.type == CONJUNCTIVE || all_of( landmark_to_add.atoms.begin(), landmark_to_add.atoms.end(), [&](const FactPair &atom) {return !contains_landmark(atom);})); /* @@ -103,15 +103,19 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { LandmarkNode *new_node = add_node(move(landmark_to_add)); const Landmark &landmark = new_node->get_landmark(); - if (landmark.is_disjunctive) { + switch (landmark.type) { + case DISJUNCTIVE: for (const FactPair &atom : landmark.atoms) { 
disjunctive_landmarks_to_nodes.emplace(atom, new_node); } ++num_disjunctive_landmarks; - } else if (landmark.is_conjunctive) { - ++num_conjunctive_landmarks; - } else { + break; + case SIMPLE: simple_landmarks_to_nodes.emplace(landmark.atoms.front(), new_node); + break; + case CONJUNCTIVE: + ++num_conjunctive_landmarks; + break; } return *new_node; } @@ -126,15 +130,19 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { assert(!child->parents.contains(node)); } const Landmark &landmark = node->get_landmark(); - if (landmark.is_disjunctive) { + switch (landmark.type) { + case DISJUNCTIVE: --num_disjunctive_landmarks; for (const FactPair &atom : landmark.atoms) { disjunctive_landmarks_to_nodes.erase(atom); } - } else if (landmark.is_conjunctive) { - --num_conjunctive_landmarks; - } else { + break; + case SIMPLE: simple_landmarks_to_nodes.erase(landmark.atoms[0]); + break; + case CONJUNCTIVE: + --num_conjunctive_landmarks; + break; } } diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 6a1b323558..7e762f06af 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -128,8 +128,8 @@ void LandmarkHeuristic::compute_landmarks_achieved_by_atom() { for (const auto &node : *landmark_graph) { const int id = node->get_id(); const Landmark &landmark = node->get_landmark(); - if (landmark.is_conjunctive) { - /* + if (landmark.type == CONJUNCTIVE) { + /* TODO: We currently have no way to declare operators preferred based on conjunctive landmarks. We consider this a bug and want to fix it in issue1072.
diff --git a/src/search/landmarks/util.cc b/src/search/landmarks/util.cc index 62245b60df..280f2b3e1a 100644 --- a/src/search/landmarks/util.cc +++ b/src/search/landmarks/util.cc @@ -83,7 +83,7 @@ static void dump_node( utils::LogProxy &log) { if (log.is_at_least_debug()) { const Landmark &landmark = node.get_landmark(); - char delimiter = landmark.is_disjunctive ? '|' : '&'; + char delimiter = landmark.type == DISJUNCTIVE ? '|' : '&'; cout << " lm" << node.get_id() << " [label=\""; bool first = true; for (const FactPair &atom : landmark.atoms) { From 38437545e7e5f5c34426f02f1de819d6c4ee3863 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 23 Apr 2025 14:26:57 +0200 Subject: [PATCH 104/108] Rename simple to atomic and add doc. --- .../cartesian_abstractions/utils_landmarks.cc | 2 +- src/search/landmarks/landmark.cc | 2 +- src/search/landmarks/landmark.h | 19 +++++-- src/search/landmarks/landmark_factory_h_m.cc | 2 +- .../landmarks/landmark_factory_merged.cc | 12 ++-- .../landmarks/landmark_factory_merged.h | 2 +- .../landmark_factory_reasonable_orders_hps.cc | 4 +- .../landmarks/landmark_factory_rpg_exhaust.cc | 10 ++-- .../landmarks/landmark_factory_rpg_exhaust.h | 2 +- .../landmarks/landmark_factory_rpg_sasp.cc | 56 +++++++++---------- .../landmarks/landmark_factory_rpg_sasp.h | 4 +- .../landmarks/landmark_factory_zhu_givan.cc | 14 ++--- src/search/landmarks/landmark_graph.cc | 26 ++++----- src/search/landmarks/landmark_graph.h | 6 +- 14 files changed, 85 insertions(+), 76 deletions(-) diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index dd83b6f2cb..b3a15b1844 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -40,7 +40,7 @@ vector get_atom_landmarks(const LandmarkGraph &graph) { utils::HashMap get_atom_to_landmark_map( const shared_ptr &graph) { - // All landmarks are simple, i.e., each has exactly one 
atom. + // All landmarks are atomic, i.e., each has exactly one atom. assert(all_of(graph->begin(), graph->end(), [](auto &node) { return node->get_landmark().atoms.size() == 1; })); diff --git a/src/search/landmarks/landmark.cc b/src/search/landmarks/landmark.cc index 018e3ea66d..c256bf66de 100644 --- a/src/search/landmarks/landmark.cc +++ b/src/search/landmarks/landmark.cc @@ -11,7 +11,7 @@ bool Landmark::is_true_in_state(const State &state) const { return ranges::any_of( atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } else { - assert(type == CONJUNCTIVE || type == SIMPLE); + assert(type == CONJUNCTIVE || type == ATOMIC); return ranges::all_of( atoms.cbegin(), atoms.cend(), is_atom_true_in_state); } diff --git a/src/search/landmarks/landmark.h b/src/search/landmarks/landmark.h index aa4ff7ea84..bfbec05e0e 100644 --- a/src/search/landmarks/landmark.h +++ b/src/search/landmarks/landmark.h @@ -6,20 +6,30 @@ #include namespace landmarks { +/* + Here, landmarks are formulas over the atoms of the planning task. We support + exactly three specific kinds of formulas: atomic formulas, disjunctions, and + conjunctions. Therefore, we can represent the landmarks as sets of atoms + annotated with their type `ATOMIC`, `DISJUNCTIVE`, or `CONJUNCTIVE`. We assert + that `ATOMIC` landmarks consist of exactly one atom. Even though atomic + formulas can in theory be considered to be disjunctions or conjunctions over + a single atom, we require that `DISJUNCTIVE` and `CONJUNCTIVE` landmarks + consist of at least two atoms. 
+*/ enum LandmarkType { DISJUNCTIVE, - SIMPLE, + ATOMIC, CONJUNCTIVE, }; + class Landmark { public: Landmark(std::vector _atoms, LandmarkType type, bool is_true_in_goal = false, bool is_derived = false) : atoms(move(_atoms)), type(type), is_true_in_goal(is_true_in_goal), is_derived(is_derived) { - assert((type == DISJUNCTIVE && atoms.size() > 1) || - (type == CONJUNCTIVE && atoms.size() > 1) || - (type == SIMPLE && atoms.size() == 1)); + assert((type == ATOMIC && atoms.size() == 1) || + (type != ATOMIC && atoms.size() > 1)); } bool operator==(const Landmark &other) const { @@ -32,7 +42,6 @@ class Landmark { const std::vector atoms; const LandmarkType type; - bool is_conjunctive; bool is_true_in_goal; bool is_derived; diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index 0ffcbdd1b3..b0ece6cb6c 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -926,7 +926,7 @@ void LandmarkFactoryHM::add_landmark_node(int set_index, bool goal) { vector facts(hm_entry.propositions); utils::sort_unique(facts); assert(!facts.empty()); - LandmarkType type = facts.size() == 1 ? SIMPLE : CONJUNCTIVE; + LandmarkType type = facts.size() == 1 ? 
ATOMIC : CONJUNCTIVE; Landmark landmark(move(facts), type, goal); landmark.first_achievers.insert(hm_entry.first_achievers.begin(), hm_entry.first_achievers.end()); diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 390c716b30..a5a011f02a 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -43,8 +43,8 @@ LandmarkNode *LandmarkFactoryMerged::get_matching_landmark( assert(landmark.atoms.size() == 1); const FactPair &atom = landmark.atoms[0]; - if (landmark_graph->contains_simple_landmark(atom)) { - return &landmark_graph->get_simple_landmark_node(atom); + if (landmark_graph->contains_atomic_landmark(atom)) { + return &landmark_graph->get_atomic_landmark_node(atom); } return nullptr; } @@ -62,10 +62,10 @@ vector> LandmarkFactoryMerged::generate_landmark_graph return landmark_graphs; } -void LandmarkFactoryMerged::add_simple_landmarks( +void LandmarkFactoryMerged::add_atomic_landmarks( const vector> &landmark_graphs) const { if (log.is_at_least_normal()) { - log << "Adding simple landmarks" << endl; + log << "Adding atomic landmarks" << endl; } for (const auto &graph_to_merge : landmark_graphs) { // TODO: Loop over landmarks instead. @@ -99,7 +99,7 @@ void LandmarkFactoryMerged::add_disjunctive_landmarks( if (landmark.type == DISJUNCTIVE) { /* TODO: It seems that disjunctive landmarks are only added if - none of the atoms it is made of is also there as a simple + none of the atoms it is made of is also there as an atomic landmark. 
This should either be more general (add only if none of its subset is already there) or it should be done only upon request (e.g., heuristics that consider orders might want to @@ -154,7 +154,7 @@ void LandmarkFactoryMerged::generate_landmarks( } vector> landmark_graphs = generate_landmark_graphs_of_subfactories(task); - add_simple_landmarks(landmark_graphs); + add_atomic_landmarks(landmark_graphs); add_disjunctive_landmarks(landmark_graphs); add_landmark_orderings(landmark_graphs); postprocess(); diff --git a/src/search/landmarks/landmark_factory_merged.h b/src/search/landmarks/landmark_factory_merged.h index 0ce0fdfed2..70a6cab917 100644 --- a/src/search/landmarks/landmark_factory_merged.h +++ b/src/search/landmarks/landmark_factory_merged.h @@ -11,7 +11,7 @@ class LandmarkFactoryMerged : public LandmarkFactory { std::vector> generate_landmark_graphs_of_subfactories( const std::shared_ptr &task); - void add_simple_landmarks( + void add_atomic_landmarks( const std::vector> &landmark_graphs) const; void add_disjunctive_landmarks( const std::vector> &landmark_graphs) const; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 4824d16b95..031e11629e 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -365,8 +365,8 @@ bool LandmarkFactoryReasonableOrdersHPS::interferes( const TaskProxy &task_proxy, const Landmark &landmark_a, const Landmark &landmark_b) const { assert(landmark_a != landmark_b); - assert(!landmark_a.type == DISJUNCTIVE); - assert(!landmark_b.type == DISJUNCTIVE); + assert(landmark_a.type != DISJUNCTIVE); + assert(landmark_b.type != DISJUNCTIVE); VariablesProxy variables = task_proxy.get_variables(); for (const FactPair &atom_b : landmark_b.atoms) { diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc 
b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index a8ea64628d..4baf2a8191 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -44,18 +44,18 @@ static bool relaxed_task_solvable( void LandmarkFactoryRpgExhaust::generate_goal_landmarks( const TaskProxy &task_proxy) const { for (FactProxy goal : task_proxy.get_goals()) { - Landmark landmark({goal.get_pair()}, SIMPLE, true); + Landmark landmark({goal.get_pair()}, ATOMIC, true); landmark_graph->add_landmark(move(landmark)); } } -void LandmarkFactoryRpgExhaust::generate_all_simple_landmarks( +void LandmarkFactoryRpgExhaust::generate_all_atomic_landmarks( const TaskProxy &task_proxy, Exploration &exploration) const { for (VariableProxy var : task_proxy.get_variables()) { for (int value = 0; value < var.get_domain_size(); ++value) { const FactPair atom(var.get_id(), value); - if (!landmark_graph->contains_simple_landmark(atom)) { - Landmark landmark({atom}, SIMPLE); + if (!landmark_graph->contains_atomic_landmark(atom)) { + Landmark landmark({atom}, ATOMIC); if (!relaxed_task_solvable(task_proxy, exploration, landmark, use_unary_relaxation)) { landmark_graph->add_landmark(move(landmark)); @@ -73,7 +73,7 @@ void LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( << endl; } generate_goal_landmarks(task_proxy); - generate_all_simple_landmarks(task_proxy, exploration); + generate_all_atomic_landmarks(task_proxy, exploration); } bool LandmarkFactoryRpgExhaust::supports_conditional_effects() const { diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.h b/src/search/landmarks/landmark_factory_rpg_exhaust.h index 9a57f0423c..782ad95f21 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.h +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.h @@ -7,7 +7,7 @@ namespace landmarks { class LandmarkFactoryRpgExhaust : public LandmarkFactoryRelaxation { const bool use_unary_relaxation; void generate_goal_landmarks(const 
TaskProxy &task_proxy) const; - void generate_all_simple_landmarks( + void generate_all_atomic_landmarks( const TaskProxy &task_proxy, Exploration &exploration) const; virtual void generate_relaxed_landmarks( const std::shared_ptr &task, diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 3d8ef839a9..70715ce32c 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -218,17 +218,17 @@ static vector get_natural_parents(const LandmarkNode *node) { } void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( - LandmarkNode &simple_landmark_node) { + LandmarkNode &atomic_landmark_node) { /* In issue1004, we fixed a bug in this part of the code. It now removes the disjunctive landmark along with all its orderings from - the landmark graph and adds a new simple landmark node. Before + the landmark graph and adds a new atomic landmark node. Before this change, incoming orderings were maintained, which is not always correct for greedy-necessary orderings. We now replace those incoming orderings with natural orderings. */ - const Landmark &landmark = simple_landmark_node.get_landmark(); - assert(landmark.type == SIMPLE); + const Landmark &landmark = atomic_landmark_node.get_landmark(); + assert(landmark.type == ATOMIC); LandmarkNode *disjunctive_landmark_node = &landmark_graph->get_disjunctive_landmark_node(landmark.atoms[0]); remove_occurrences_of_landmark_node(disjunctive_landmark_node); @@ -237,34 +237,34 @@ void LandmarkFactoryRpgSasp::remove_disjunctive_landmark_and_rewire_orderings( assert(use_orders || parents.empty()); landmark_graph->remove_node(disjunctive_landmark_node); /* Add incoming orderings of replaced `disjunctive_landmark_node` as - natural orderings to `simple_node`. */ + natural orderings to `atomic_landmark_node`. 
*/ for (LandmarkNode *parent : parents) { add_or_replace_ordering_if_stronger( - *parent, simple_landmark_node, OrderingType::NATURAL); + *parent, atomic_landmark_node, OrderingType::NATURAL); } } -void LandmarkFactoryRpgSasp::add_simple_landmark_and_ordering( +void LandmarkFactoryRpgSasp::add_atomic_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type) { - if (landmark_graph->contains_simple_landmark(atom)) { + if (landmark_graph->contains_atomic_landmark(atom)) { if (use_orders) { - LandmarkNode &simple_landmark = - landmark_graph->get_simple_landmark_node(atom); - add_or_replace_ordering_if_stronger(simple_landmark, node, type); + LandmarkNode &atomic_landmark = + landmark_graph->get_atomic_landmark_node(atom); + add_or_replace_ordering_if_stronger(atomic_landmark, node, type); } return; } - Landmark landmark({atom}, SIMPLE); - LandmarkNode &simple_landmark_node = + Landmark landmark({atom}, ATOMIC); + LandmarkNode &atomic_landmark_node = landmark_graph->add_landmark(move(landmark)); - open_landmarks.push_back(&simple_landmark_node); + open_landmarks.push_back(&atomic_landmark_node); if (use_orders) { - add_or_replace_ordering_if_stronger(simple_landmark_node, node, type); + add_or_replace_ordering_if_stronger(atomic_landmark_node, node, type); } if (landmark_graph->contains_disjunctive_landmark(atom)) { // Simple landmarks are more informative than disjunctive ones. - remove_disjunctive_landmark_and_rewire_orderings(simple_landmark_node); + remove_disjunctive_landmark_and_rewire_orderings(atomic_landmark_node); } } @@ -274,11 +274,11 @@ bool LandmarkFactoryRpgSasp::deal_with_overlapping_landmarks( OrderingType type) const { if (ranges::any_of( atoms.begin(), atoms.end(), [&](const FactPair &atom) { - return landmark_graph->contains_simple_landmark(atom); + return landmark_graph->contains_atomic_landmark(atom); })) { /* - Do not add the landmark because the simple one is stronger. 
Do not add - the ordering(s) to the corresponding simple landmark(s) as they are + Do not add the landmark because the atomic one is stronger. Do not add + the ordering(s) to the corresponding atomic landmark(s) as they are not guaranteed to hold. */ return true; @@ -439,9 +439,9 @@ void LandmarkFactoryRpgSasp::extend_disjunction_class_lookups( } /* Only deal with propositions that are not shared preconditions - (which have been found already and are simple landmarks). */ + (which have been found already and are atomic landmarks). */ FactPair precondition(var, value); - if (!landmark_graph->contains_simple_landmark(precondition)) { + if (!landmark_graph->contains_atomic_landmark(precondition)) { preconditions[disjunction_class].push_back(precondition); used_operators[disjunction_class].insert(op_id); } @@ -503,7 +503,7 @@ vector> LandmarkFactoryRpgSasp::compute_disjunctive_pre void LandmarkFactoryRpgSasp::generate_goal_landmarks( const TaskProxy &task_proxy) { for (FactProxy goal : task_proxy.get_goals()) { - Landmark landmark({goal.get_pair()}, SIMPLE, true); + Landmark landmark({goal.get_pair()}, ATOMIC, true); LandmarkNode &node = landmark_graph->add_landmark(move(landmark)); open_landmarks.push_back(&node); } @@ -517,7 +517,7 @@ void LandmarkFactoryRpgSasp::generate_shared_precondition_landmarks( /* All shared preconditions are landmarks, and greedy-necessary predecessors of `landmark`. */ for (const FactPair &atom : shared_preconditions) { - add_simple_landmark_and_ordering( + add_atomic_landmark_and_ordering( atom, *node, OrderingType::GREEDY_NECESSARY); } } @@ -535,7 +535,7 @@ void LandmarkFactoryRpgSasp::generate_disjunctive_precondition_landmarks( preconditions.begin(), preconditions.end(), [&](const FactPair &atom) { /* TODO: Is there a good reason why not? We allow - simple landmarks to hold in the initial state. */ + atomic landmarks to hold in the initial state. 
*/ return initial_state[atom.var].get_value() == atom.value; })) { add_disjunctive_landmark_and_ordering( @@ -653,7 +653,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( const Landmark &landmark = node->get_landmark(); forward_orderings[node] = compute_atoms_unreachable_without_landmark( variables, landmark, reached); - if (landmark.type != SIMPLE) { + if (landmark.type != ATOMIC) { return; } assert(landmark.atoms.size() == 1); @@ -665,7 +665,7 @@ void LandmarkFactoryRpgSasp::approximate_lookahead_orderings( init_atom.value, landmark_atom.value, reached[landmark_atom.var], dtg_successors[landmark_atom.var]); for (int value : critical_predecessors) { - add_simple_landmark_and_ordering(FactPair(landmark_atom.var, value), + add_atomic_landmark_and_ordering(FactPair(landmark_atom.var, value), *node, OrderingType::NATURAL); } } @@ -721,9 +721,9 @@ utils::HashSet LandmarkFactoryRpgSasp::compute_atoms_unreachable_witho void LandmarkFactoryRpgSasp::add_landmark_forward_orderings() { for (const auto &node : *landmark_graph) { for (const auto &node2_pair : forward_orderings[node.get()]) { - if (landmark_graph->contains_simple_landmark(node2_pair)) { + if (landmark_graph->contains_atomic_landmark(node2_pair)) { LandmarkNode &node2 = - landmark_graph->get_simple_landmark_node(node2_pair); + landmark_graph->get_atomic_landmark_node(node2_pair); add_or_replace_ordering_if_stronger( *node, node2, OrderingType::NATURAL); } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 44d979aabd..4dc24e0d0c 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -67,8 +67,8 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { Exploration &exploration) override; void remove_occurrences_of_landmark_node(const LandmarkNode *node); void remove_disjunctive_landmark_and_rewire_orderings( - LandmarkNode &simple_landmark_node); - void 
add_simple_landmark_and_ordering( + LandmarkNode &atomic_landmark_node); + void add_atomic_landmark_and_ordering( const FactPair &atom, LandmarkNode &node, OrderingType type); bool deal_with_overlapping_landmarks( const utils::HashSet &atoms, LandmarkNode &node, diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 755e8ec7a6..84c39a74b5 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -46,7 +46,7 @@ bool LandmarkFactoryZhuGivan::goal_is_reachable( if (log.is_at_least_normal()) { log << "Problem not solvable, even if relaxed." << endl; } - Landmark landmark({goal.get_pair()}, SIMPLE, true); + Landmark landmark({goal.get_pair()}, ATOMIC, true); landmark_graph->add_landmark(move(landmark)); return false; } @@ -57,11 +57,11 @@ bool LandmarkFactoryZhuGivan::goal_is_reachable( LandmarkNode *LandmarkFactoryZhuGivan::create_goal_landmark( const FactPair &goal) const { LandmarkNode *node; - if (landmark_graph->contains_simple_landmark(goal)) { - node = &landmark_graph->get_simple_landmark_node(goal); + if (landmark_graph->contains_atomic_landmark(goal)) { + node = &landmark_graph->get_atomic_landmark_node(goal); node->get_landmark().is_true_in_goal = true; } else { - Landmark landmark({goal}, SIMPLE, true); + Landmark landmark({goal}, ATOMIC, true); node = &landmark_graph->add_landmark(move(landmark)); } return node; @@ -80,10 +80,10 @@ void LandmarkFactoryZhuGivan::extract_landmarks_and_orderings_from_goal_labels( } LandmarkNode *node; - if (landmark_graph->contains_simple_landmark(atom)) { - node = &landmark_graph->get_simple_landmark_node(atom); + if (landmark_graph->contains_atomic_landmark(atom)) { + node = &landmark_graph->get_atomic_landmark_node(atom); } else { - Landmark landmark({atom}, SIMPLE); + Landmark landmark({atom}, ATOMIC); node = &landmark_graph->add_landmark(move(landmark)); } if (use_orders) { diff --git 
a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index 67a5d566f7..52efa17ad6 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -26,24 +26,24 @@ const LandmarkNode *LandmarkGraph::get_node(int i) const { return nodes[i].get(); } -LandmarkNode &LandmarkGraph::get_simple_landmark_node( +LandmarkNode &LandmarkGraph::get_atomic_landmark_node( const FactPair &atom) const { - assert(contains_simple_landmark(atom)); - return *(simple_landmarks_to_nodes.find(atom)->second); + assert(contains_atomic_landmark(atom)); + return *(atomic_landmarks_to_nodes.find(atom)->second); } LandmarkNode &LandmarkGraph::get_disjunctive_landmark_node( const FactPair &atom) const { /* Note: this only works because every proposition appears in only one disjunctive landmark. */ - assert(!contains_simple_landmark(atom)); + assert(!contains_atomic_landmark(atom)); assert(contains_disjunctive_landmark(atom)); return *(disjunctive_landmarks_to_nodes.find(atom)->second); } -bool LandmarkGraph::contains_simple_landmark(const FactPair &atom) const { - return simple_landmarks_to_nodes.contains(atom); +bool LandmarkGraph::contains_atomic_landmark(const FactPair &atom) const { + return atomic_landmarks_to_nodes.contains(atom); } bool LandmarkGraph::contains_disjunctive_landmark(const FactPair &atom) const { @@ -78,8 +78,8 @@ bool LandmarkGraph::contains_superset_disjunctive_landmark( bool LandmarkGraph::contains_landmark(const FactPair &atom) const { /* Note: this only checks for one atom whether it's part of a landmark, - hence only simple and disjunctive landmarks are checked. */ - return contains_simple_landmark(atom) || + hence only atomic and disjunctive landmarks are checked. */ + return contains_atomic_landmark(atom) || contains_disjunctive_landmark(atom); } @@ -98,7 +98,7 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { TODO: Avoid having to fetch landmark after moving it. 
This will only be possible after removing the assumption that landmarks don't overlap because we wont need `disjunctive_landmarks_to_nodes` and - `simple_landmarks_to_nodes` anymore. + `atomic_landmarks_to_nodes` anymore. */ LandmarkNode *new_node = add_node(move(landmark_to_add)); const Landmark &landmark = new_node->get_landmark(); @@ -110,8 +110,8 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { } ++num_disjunctive_landmarks; break; - case SIMPLE: - simple_landmarks_to_nodes.emplace(landmark.atoms.front(), new_node); + case ATOMIC: + atomic_landmarks_to_nodes.emplace(landmark.atoms.front(), new_node); break; case CONJUNCTIVE: ++num_conjunctive_landmarks; @@ -137,8 +137,8 @@ void LandmarkGraph::remove_node_occurrences(LandmarkNode *node) { disjunctive_landmarks_to_nodes.erase(atom); } break; - case SIMPLE: - simple_landmarks_to_nodes.erase(landmark.atoms[0]); + case ATOMIC: + atomic_landmarks_to_nodes.erase(landmark.atoms[0]); break; case CONJUNCTIVE: --num_conjunctive_landmarks; diff --git a/src/search/landmarks/landmark_graph.h b/src/search/landmarks/landmark_graph.h index e130a99cd9..5d429ca267 100644 --- a/src/search/landmarks/landmark_graph.h +++ b/src/search/landmarks/landmark_graph.h @@ -70,7 +70,7 @@ class LandmarkGraph { int num_conjunctive_landmarks; int num_disjunctive_landmarks; - utils::HashMap simple_landmarks_to_nodes; + utils::HashMap atomic_landmarks_to_nodes; utils::HashMap disjunctive_landmarks_to_nodes; void remove_node_occurrences(LandmarkNode *node); @@ -121,7 +121,7 @@ class LandmarkGraph { const LandmarkNode *get_node(int index) const; /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. 
*/ - LandmarkNode &get_simple_landmark_node(const FactPair &atom) const; + LandmarkNode &get_atomic_landmark_node(const FactPair &atom) const; /* This is needed only by landmark graph factories and will disappear get_num_landmarks when moving landmark graph creation there. */ LandmarkNode &get_disjunctive_landmark_node(const FactPair &atom) const; @@ -129,7 +129,7 @@ class LandmarkGraph { /* This is needed only by landmark graph factories and will disappear when moving landmark graph creation there. It is not needed by HMLandmarkFactory. */ - bool contains_simple_landmark(const FactPair &atom) const; + bool contains_atomic_landmark(const FactPair &atom) const; // Only used internally. bool contains_disjunctive_landmark(const FactPair &atom) const; /* This is needed only by landmark graph factories and will disappear From 412c6452849fc8f38a0a053690586c04569b2cee Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 23 Apr 2025 17:23:19 +0200 Subject: [PATCH 105/108] Clean up TODOs and other stuff. 
--- src/search/CMakeLists.txt | 2 +- .../cartesian_abstractions/utils_landmarks.cc | 3 ++- src/search/landmarks/exploration.cc | 2 -- .../landmark_cost_partitioning_algorithms.cc | 27 +++++++++---------- src/search/landmarks/landmark_factory.cc | 3 --- ..._factory_h_m.cc => landmark_factory_hm.cc} | 2 +- ...rk_factory_h_m.h => landmark_factory_hm.h} | 0 .../landmarks/landmark_factory_merged.cc | 2 -- .../landmark_factory_reasonable_orders_hps.cc | 13 --------- .../landmarks/landmark_factory_rpg_sasp.cc | 2 -- .../landmarks/landmark_factory_rpg_sasp.h | 2 -- .../landmarks/landmark_factory_zhu_givan.cc | 15 +++++------ src/search/landmarks/landmark_graph.cc | 3 +-- src/search/landmarks/landmark_heuristic.cc | 2 +- src/search/utils/component_errors.h | 4 +-- 15 files changed, 27 insertions(+), 55 deletions(-) rename src/search/landmarks/{landmark_factory_h_m.cc => landmark_factory_hm.cc} (99%) rename src/search/landmarks/{landmark_factory_h_m.h => landmark_factory_hm.h} (100%) diff --git a/src/search/CMakeLists.txt b/src/search/CMakeLists.txt index 1c10409130..f2e9d902c6 100644 --- a/src/search/CMakeLists.txt +++ b/src/search/CMakeLists.txt @@ -849,7 +849,7 @@ create_fast_downward_library( landmarks/landmark_cost_partitioning_algorithms landmarks/landmark_cost_partitioning_heuristic landmarks/landmark_factory - landmarks/landmark_factory_h_m + landmarks/landmark_factory_hm landmarks/landmark_factory_reasonable_orders_hps landmarks/landmark_factory_merged landmarks/landmark_factory_relaxation diff --git a/src/search/cartesian_abstractions/utils_landmarks.cc b/src/search/cartesian_abstractions/utils_landmarks.cc index b3a15b1844..1d9c0fb3b3 100644 --- a/src/search/cartesian_abstractions/utils_landmarks.cc +++ b/src/search/cartesian_abstractions/utils_landmarks.cc @@ -2,7 +2,7 @@ #include "../plugins/plugin.h" #include "../landmarks/landmark.h" -#include "../landmarks/landmark_factory_h_m.h" +#include "../landmarks/landmark_factory_hm.h" #include 
"../landmarks/landmark_graph.h" #include "../utils/logging.h" @@ -16,6 +16,7 @@ using namespace landmarks; namespace cartesian_abstractions { static FactPair get_atom(const Landmark &landmark) { // We assume that the given Landmarks are from an h^m landmark graph with m=1. + assert(landmark.type == ATOMIC); assert(landmark.atoms.size() == 1); return landmark.atoms[0]; } diff --git a/src/search/landmarks/exploration.cc b/src/search/landmarks/exploration.cc index 7c18549afa..1fb20669d6 100644 --- a/src/search/landmarks/exploration.cc +++ b/src/search/landmarks/exploration.cc @@ -13,8 +13,6 @@ using namespace std; namespace landmarks { /* - TODO: Verify this comment. - Implementation note: Compared to RelaxationHeuristic, we *cannot simplify* unary operators, because this may conflict with excluded operators. For an example, consider that unary operator o1 is thrown out during diff --git a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc index b41ac2fa15..6c752fac0c 100644 --- a/src/search/landmarks/landmark_cost_partitioning_algorithms.cc +++ b/src/search/landmarks/landmark_cost_partitioning_algorithms.cc @@ -3,15 +3,12 @@ #include "landmark.h" #include "landmark_graph.h" #include "landmark_status_manager.h" -#include "util.h" #include "../utils/collections.h" #include "../utils/language.h" #include #include -#include -#include #include using namespace std; @@ -113,7 +110,7 @@ double UniformCostPartitioningAlgorithm::third_pass( ConstBitsetView &past, ConstBitsetView &future) { double cost = 0; for (const LandmarkNode *node : uncovered_landmarks) { - // TODO: Iterate over Landmarks instead of LandmarkNodes + // TODO: Iterate over Landmarks instead of LandmarkNodes. 
int id = node->get_id(); assert(future.test(id)); utils::unused_variable(future); @@ -152,9 +149,9 @@ double UniformCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( } /* - TODO: Replace with Landmarks (to do so, we need some way to access the - status of a Landmark without access to the ID, which is part of - LandmarkNode). + TODO: Use landmarks instead of landmark nodes. To do so, we need + some way to access the status of a Landmark without access to the + ID which is part of landmark node. */ const vector uncovered_landmarks = second_pass( landmarks_achieved_by_operator, action_landmarks, past, future); @@ -244,13 +241,13 @@ bool OptimalCostPartitioningAlgorithm::define_constraint_matrix( get_achievers(landmark, past.test(id)); /* TODO: We could deal with things more uniformly by just adding a - constraint with no variables because there are no achievers, - which would then be detected as an unsolvable constraint by the - LP solver. However, as of now this does not work because - `get_cost_partitioned_heuristic_value` only adds non-empty - constraints to the LP. We should implement this differently, - which requires a solution that does not reuse constraints from - the previous iteration as it does now. + constraint with no variables because there are no achievers + (instead of returning here), which would then be detected as an + unsolvable constraint by the LP solver. However, as of now this + does not work because `get_cost_partitioned_heuristic_value` only + adds non-empty constraints to the LP. We should implement this + differently, which requires a solution that does not reuse + constraints from the previous iteration as it does now. */ if (achievers.empty()) { return true; @@ -285,7 +282,7 @@ double OptimalCostPartitioningAlgorithm::get_cost_partitioned_heuristic_value( /* Copy non-empty constraints and use those in the LP. This significantly speeds up the heuristic calculation. See issue443. */ - // TODO: do not copy the data here. 
+ // TODO: Do not copy the data here. lp.get_constraints().clear(); for (const lp::LPConstraint &constraint : lp_constraints) { if (!constraint.empty()) { diff --git a/src/search/landmarks/landmark_factory.cc b/src/search/landmarks/landmark_factory.cc index 5a24f73326..a0a140510e 100644 --- a/src/search/landmarks/landmark_factory.cc +++ b/src/search/landmarks/landmark_factory.cc @@ -92,9 +92,6 @@ void LandmarkFactory::add_ordering( landmarks is already present, the stronger ordering type wins. */ void LandmarkFactory::add_or_replace_ordering_if_stronger( LandmarkNode &from, LandmarkNode &to, OrderingType type) const { - // TODO: Understand why self-loops are not allowed. - assert(&from != &to); - if (weaker_ordering_exists(from, to, type)) { remove_ordering(from, to); } diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_hm.cc similarity index 99% rename from src/search/landmarks/landmark_factory_h_m.cc rename to src/search/landmarks/landmark_factory_hm.cc index b0ece6cb6c..8db8a630d2 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_hm.cc @@ -1,4 +1,4 @@ -#include "landmark_factory_h_m.h" +#include "landmark_factory_hm.h" #include diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_hm.h similarity index 100% rename from src/search/landmarks/landmark_factory_h_m.h rename to src/search/landmarks/landmark_factory_hm.h diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index a5a011f02a..4f7c5e9567 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -6,8 +6,6 @@ #include "../utils/component_errors.h" #include "../plugins/plugin.h" -#include - #include "util.h" using namespace std; diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc 
b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 031e11629e..1d99dbd198 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -9,11 +9,6 @@ #include "../utils/logging.h" #include "../utils/markup.h" -#include -#include -#include -#include - using namespace std; namespace landmarks { LandmarkFactoryReasonableOrdersHPS::LandmarkFactoryReasonableOrdersHPS( @@ -99,14 +94,6 @@ void LandmarkFactoryReasonableOrdersHPS::insert_reasonable_orderings( continue; } if (interferes(task_proxy, other_landmark, landmark)) { - /* - TODO: If `other_landmark` interferes with `landmark`, then by - transitivity we know all natural predecessors of `other_landmark` - are also reasonably ordered before `landmark`, but here we only - add the one reasonable ordering. Maybe it's not worth adding the - others as well (transitivity), but it could be interesting to - test the effect of doing so, for example for the cycle heuristic. - */ add_or_replace_ordering_if_stronger( *other, node, OrderingType::REASONABLE); } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index 70715ce32c..1b49d57607 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -534,8 +534,6 @@ void LandmarkFactoryRpgSasp::generate_disjunctive_precondition_landmarks( if (preconditions.size() < 5 && ranges::none_of( preconditions.begin(), preconditions.end(), [&](const FactPair &atom) { - /* TODO: Is there a good reason why not? We allow - atomic landmarks to hold in the initial state. 
*/ return initial_state[atom.var].get_value() == atom.value; })) { add_disjunctive_landmark_and_ordering( diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 4dc24e0d0c..e55e985b80 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -6,7 +6,6 @@ #include "../utils/hash.h" #include -#include #include #include @@ -19,7 +18,6 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { std::unordered_map> forward_orderings; - // TODO: Maybe introduce a class or struct to represent domain transition graph(s). /* The entry `dtg_successors[var][val]` contains all successor values of the atom var->val in the domain transition graph (aka atomic projection). */ std::vector>> dtg_successors; diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 84c39a74b5..1d42b7864f 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -119,7 +119,7 @@ LandmarkFactoryZhuGivan::PropositionLayer LandmarkFactoryZhuGivan::initialize_re int var_id = var.get_id(); initial_layer[var_id].resize(var.get_domain_size()); - // label nodes from initial state + // Label nodes from initial state. int value = initial_state[var].get_value(); initial_layer[var_id][value].labels.emplace(var_id, value); @@ -204,8 +204,6 @@ bool LandmarkFactoryZhuGivan::conditional_effect_fires( LandmarkSet LandmarkFactoryZhuGivan::union_of_condition_labels( const ConditionsProxy &conditions, const PropositionLayer &current) { - /* TODO: This looks like an O(n^2) algorithm where O(n log n) would - do, a bit like the Python string concatenation anti-pattern. 
*/ LandmarkSet result; for (FactProxy precondition : conditions) { auto [var, value] = precondition.get_pair(); @@ -231,11 +229,12 @@ static bool propagate_labels( /* Updates should always reduce the label set (intersection), except in the special case where `atom` was reached for the first time. - TODO: It would be more accurate to actually test the superset - relationship instead of just comparing set sizes. However, doing so - requires storing a copy of `labels` just to assert this. Also, it's - probably reasonable to trust the implementation of `get_intersection` - used above enough to not even assert this at all here. + + It would be more accurate to actually test the superset relationship + instead of just comparing set sizes. However, doing so requires storing a + copy of `labels` just to assert this. Also, it's probably reasonable to + trust the implementation of `get_intersection` used above enough to not + even assert this at all here. */ int new_labels_size = static_cast(labels.size()); assert(old_labels_size == 0 || old_labels_size >= new_labels_size); diff --git a/src/search/landmarks/landmark_graph.cc b/src/search/landmarks/landmark_graph.cc index 52efa17ad6..681883eaeb 100644 --- a/src/search/landmarks/landmark_graph.cc +++ b/src/search/landmarks/landmark_graph.cc @@ -4,7 +4,6 @@ #include #include -#include #include using namespace std; @@ -97,7 +96,7 @@ LandmarkNode &LandmarkGraph::add_landmark(Landmark &&landmark_to_add) { /* TODO: Avoid having to fetch landmark after moving it. This will only be possible after removing the assumption that landmarks don't overlap - because we wont need `disjunctive_landmarks_to_nodes` and + (issue257) because we won't need `disjunctive_landmarks_to_nodes` and `atomic_landmarks_to_nodes` anymore. 
*/ LandmarkNode *new_node = add_node(move(landmark_to_add)); diff --git a/src/search/landmarks/landmark_heuristic.cc b/src/search/landmarks/landmark_heuristic.cc index 7e762f06af..da2de2a239 100644 --- a/src/search/landmarks/landmark_heuristic.cc +++ b/src/search/landmarks/landmark_heuristic.cc @@ -129,7 +129,7 @@ void LandmarkHeuristic::compute_landmarks_achieved_by_atom() { const int id = node->get_id(); const Landmark &landmark = node->get_landmark(); if (landmark.type == CONJUNCTIVE) { - /*DrDDr + /* TODO: We currently have no way to declare operators preferred based on conjunctive landmarks. We consider this a bug and want to fix it in issue1072. diff --git a/src/search/utils/component_errors.h b/src/search/utils/component_errors.h index 058364013d..027858bf2c 100644 --- a/src/search/utils/component_errors.h +++ b/src/search/utils/component_errors.h @@ -15,8 +15,8 @@ class ComponentArgumentError : public Exception { void verify_argument(bool b, const std::string &message); template -void verify_list_not_empty( - const std::vector &list, const std::string &name) { +void verify_list_not_empty(const std::vector &list, + const std::string &name) { if (list.empty()) { throw ComponentArgumentError( "List argument '" + name + "' has to be non-empty."); From c61a67585d88a189bd9b24c98ea886a13d0b9110 Mon Sep 17 00:00:00 2001 From: ClemensBuechner Date: Wed, 23 Apr 2025 18:28:29 +0200 Subject: [PATCH 106/108] Add noop move constructor. 
--- src/search/landmarks/landmark_factory_hm.h | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/search/landmarks/landmark_factory_hm.h b/src/search/landmarks/landmark_factory_hm.h index 877229f320..f66e81c898 100644 --- a/src/search/landmarks/landmark_factory_hm.h +++ b/src/search/landmarks/landmark_factory_hm.h @@ -29,6 +29,11 @@ struct PropositionSetComparer { struct ConditionalNoop { std::vector effect_condition; std::vector effect; + + ConditionalNoop(std::vector &&effect_condition, + std::vector &&effect) + : effect_condition(move(effect_condition)), effect(move(effect)) { + } }; /* Corresponds to an operator from the original problem, as well as a From 4e4e98f6a9ccaccf928cf8198cf6b6e55c55b32c Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Fri, 25 Apr 2025 02:35:34 +0200 Subject: [PATCH 107/108] turn proxy classes into ranges --- src/search/task_proxy.h | 29 ++++++++++++++++++++++++++++- src/search/utils/collections.h | 3 +-- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index 58c2036965..f0ee5ae08c 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -116,6 +116,8 @@ class ProxyIterator { using value_type = decltype((*collection)[0]); using difference_type = int; // unused but required by the iterator concept + ProxyIterator() = default; + ProxyIterator(const ProxyIterator &other) = default; ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { } @@ -148,6 +150,15 @@ inline ProxyIterator end(const ProxyCollection &collection) { return ProxyIterator(collection, collection.size()); } +template +inline ProxyIterator begin(ProxyCollection &collection) { + return ProxyIterator(collection, 0); +} + +template +inline ProxyIterator end(ProxyCollection &collection) { + return ProxyIterator(collection, collection.size()); +} class FactProxy { @@ -195,6 +206,8 @@ class FactsProxyIterator { using value_type = FactProxy; 
using difference_type = int; // unused but required by the iterator concept + FactsProxyIterator() = default; + FactsProxyIterator(const FactsProxyIterator &) = default; FactsProxyIterator(const AbstractTask &task, int var_id, int value) : task(&task), var_id(var_id), value(value) {} @@ -863,6 +876,8 @@ class ProxyIterator { using difference_type = int; // unused but required by the iterator concept using value_type = FactProxy; + ProxyIterator() = default; + ProxyIterator(const ProxyIterator &other) = default; ProxyIterator(const State &state, int var_id) : state(&state), var_id(var_id) { } @@ -889,12 +904,24 @@ static_assert(std::input_iterator>); static_assert(std::input_iterator>); static_assert(std::input_iterator>); static_assert(std::input_iterator>); -static_assert(std::input_iterator); static_assert(std::input_iterator>); static_assert(std::input_iterator>); static_assert(std::input_iterator>); static_assert(std::input_iterator>); static_assert(std::input_iterator>); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); +static_assert(std::ranges::range); + +static_assert(std::input_iterator); +static_assert(std::ranges::range); + #endif diff --git a/src/search/utils/collections.h b/src/search/utils/collections.h index 5d13592ffa..c78f80ac3d 100644 --- a/src/search/utils/collections.h +++ b/src/search/utils/collections.h @@ -77,8 +77,7 @@ template std::vector map_vector(const Collection &collection, MapFunc map_func) { std::vector transformed; transformed.reserve(collection.size()); - std::transform(begin(collection), end(collection), - std::back_inserter(transformed), map_func); + std::ranges::transform(collection, std::back_inserter(transformed), map_func); return transformed; } From 6fb6b5164cea95698514f37375272370ff983812 
Mon Sep 17 00:00:00 2001 From: Florian Pommerening Date: Fri, 25 Apr 2025 14:08:39 +0200 Subject: [PATCH 108/108] code cleanup (remove implicitly defined special constructors, destructors, revert workflow) --- .github/workflows/windows.yml | 4 ++-- src/search/heuristics/cea_heuristic.cc | 1 - src/search/task_proxy.h | 16 ---------------- 3 files changed, 2 insertions(+), 19 deletions(-) diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 5a87339cba..7c631a8970 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -26,8 +26,8 @@ jobs: strategy: matrix: platform: - - {os: "windows-2022", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} - - {os: "windows-2025", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} + - {os: "windows-2022", vc: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} + - {os: "windows-2019", vc: "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Enterprise\\VC\\Auxiliary\\Build\\vcvarsall.bat"} python-version: [3.8] steps: - name: Clone repository diff --git a/src/search/heuristics/cea_heuristic.cc b/src/search/heuristics/cea_heuristic.cc index e070afa7ef..807a5b42bf 100644 --- a/src/search/heuristics/cea_heuristic.cc +++ b/src/search/heuristics/cea_heuristic.cc @@ -228,7 +228,6 @@ void ContextEnhancedAdditiveHeuristic::set_up_local_problem( LocalProblemNode *start = &problem->nodes[start_value]; start->cost = 0; for (size_t i = 0; i < problem->context_variables->size(); ++i) - // TODO issue997: is casting from int to a short here fine? 
start->context[i] = static_cast(state[(*problem->context_variables)[i]]); add_to_heap(start); diff --git a/src/search/task_proxy.h b/src/search/task_proxy.h index f0ee5ae08c..691ee44f84 100644 --- a/src/search/task_proxy.h +++ b/src/search/task_proxy.h @@ -117,7 +117,6 @@ class ProxyIterator { using difference_type = int; // unused but required by the iterator concept ProxyIterator() = default; - ProxyIterator(const ProxyIterator &other) = default; ProxyIterator(const ProxyCollection &collection, std::size_t pos) : collection(&collection), pos(pos) { } @@ -167,7 +166,6 @@ class FactProxy { public: FactProxy(const AbstractTask &task, int var_id, int value); FactProxy(const AbstractTask &task, const FactPair &fact); - ~FactProxy() = default; VariableProxy get_variable() const; @@ -207,7 +205,6 @@ class FactsProxyIterator { using difference_type = int; // unused but required by the iterator concept FactsProxyIterator() = default; - FactsProxyIterator(const FactsProxyIterator &) = default; FactsProxyIterator(const AbstractTask &task, int var_id, int value) : task(&task), var_id(var_id), value(value) {} @@ -251,7 +248,6 @@ class FactsProxy { public: explicit FactsProxy(const AbstractTask &task) : task(&task) {} - ~FactsProxy() = default; FactsProxyIterator begin() const { return FactsProxyIterator(*task, 0, 0); @@ -286,7 +282,6 @@ class VariableProxy { public: VariableProxy(const AbstractTask &task, int id) : task(&task), id(id) {} - ~VariableProxy() = default; bool operator==(const VariableProxy &other) const { assert(task == other.task); @@ -342,7 +337,6 @@ class VariablesProxy { public: explicit VariablesProxy(const AbstractTask &task) : task(&task) {} - ~VariablesProxy() = default; std::size_t size() const { return task->get_num_variables(); @@ -365,7 +359,6 @@ class PreconditionsProxy : public ConditionsProxy { public: PreconditionsProxy(const AbstractTask &task, int op_index, bool is_axiom) : ConditionsProxy(task), op_index(op_index), is_axiom(is_axiom) {} - 
~PreconditionsProxy() = default; std::size_t size() const override { return task->get_num_operator_preconditions(op_index, is_axiom); @@ -387,7 +380,6 @@ class EffectConditionsProxy : public ConditionsProxy { EffectConditionsProxy( const AbstractTask &task, int op_index, int eff_index, bool is_axiom) : ConditionsProxy(task), op_index(op_index), eff_index(eff_index), is_axiom(is_axiom) {} - ~EffectConditionsProxy() = default; std::size_t size() const override { return task->get_num_operator_effect_conditions(op_index, eff_index, is_axiom); @@ -409,7 +401,6 @@ class EffectProxy { public: EffectProxy(const AbstractTask &task, int op_index, int eff_index, bool is_axiom) : task(&task), op_index(op_index), eff_index(eff_index), is_axiom(is_axiom) {} - ~EffectProxy() = default; EffectConditionsProxy get_conditions() const { return EffectConditionsProxy(*task, op_index, eff_index, is_axiom); @@ -429,7 +420,6 @@ class EffectsProxy { public: EffectsProxy(const AbstractTask &task, int op_index, bool is_axiom) : task(&task), op_index(op_index), is_axiom(is_axiom) {} - ~EffectsProxy() = default; std::size_t size() const { return task->get_num_operator_effects(op_index, is_axiom); @@ -449,7 +439,6 @@ class OperatorProxy { public: OperatorProxy(const AbstractTask &task, int index, bool is_axiom) : task(&task), index(index), is_an_axiom(is_axiom) {} - ~OperatorProxy() = default; bool operator==(const OperatorProxy &other) const { assert(task == other.task); @@ -501,7 +490,6 @@ class OperatorsProxy { public: explicit OperatorsProxy(const AbstractTask &task) : task(&task) {} - ~OperatorsProxy() = default; std::size_t size() const { return task->get_num_operators(); @@ -527,7 +515,6 @@ class AxiomsProxy { public: explicit AxiomsProxy(const AbstractTask &task) : task(&task) {} - ~AxiomsProxy() = default; std::size_t size() const { return task->get_num_axioms(); @@ -548,7 +535,6 @@ class GoalsProxy : public ConditionsProxy { public: explicit GoalsProxy(const AbstractTask &task) : 
ConditionsProxy(task) {} - ~GoalsProxy() = default; std::size_t size() const override { return task->get_num_goals(); @@ -665,7 +651,6 @@ class TaskProxy { public: explicit TaskProxy(const AbstractTask &task) : task(&task) {} - ~TaskProxy() = default; TaskID get_id() const { return TaskID(task); @@ -877,7 +862,6 @@ class ProxyIterator { using value_type = FactProxy; ProxyIterator() = default; - ProxyIterator(const ProxyIterator &other) = default; ProxyIterator(const State &state, int var_id) : state(&state), var_id(var_id) { }