Lattice Land Powerdomains Library
Loading...
Searching...
No Matches
tables.hpp
Go to the documentation of this file.
1// Copyright 2023 Pierre Talbot
2
3#ifndef LALA_POWER_TABLES_HPP
4#define LALA_POWER_TABLES_HPP
5
6#include "battery/vector.hpp"
7#include "battery/shared_ptr.hpp"
8#include "battery/dynamic_bitset.hpp"
9#include "lala/logic/logic.hpp"
10#include "lala/universes/primitive_upset.hpp"
11#include "lala/abstract_deps.hpp"
12
13namespace lala {
14
15template <class A, class U, class Alloc> class Tables;
16namespace impl {
// Type trait: `is_table_like<T>::value` is `true` exactly when `T` is an
// instantiation of `Tables`. Used by `Tables::extract` to decide whether the
// extraction target carries a sub-domain of its own.
17 template <class>
18 struct is_table_like {
19 static constexpr bool value = false;
20 };
// Partial specialization selecting any `Tables<A, U, Alloc>` instantiation.
21 template<class A, class U, class Alloc>
22 struct is_table_like<Tables<A, U, Alloc>> {
23 static constexpr bool value = true;
24 };
25}
26
27/** The tables abstract domain is designed to represent predicates in extension by listing all their solutions explicitly.
28 * It is inspired by the table global constraint and generalizes it by lifting each element of the table to a lattice element.
29 * We expect `U` to be equally or less expressive than `A::universe_type`, this is because we compute the meet in `A::universe_type` and not in `U`.
30 */
31template <class A, class U = typename A::universe_type, class Allocator = typename A::allocator_type>
32class Tables {
33public:
34 using allocator_type = Allocator;
35 using sub_allocator_type = typename A::allocator_type;
36 using universe_type = U;
37 using local_universe = typename universe_type::local_type;
38 using sub_universe_type = typename A::universe_type;
39 using sub_local_universe = typename sub_universe_type::local_type;
40 using memory_type = typename universe_type::memory_type;
41 using sub_type = A;
42 using sub_ptr = abstract_ptr<sub_type>;
44
45 constexpr static const bool is_abstract_universe = false;
46 constexpr static const bool sequential = sub_type::sequential;
47 constexpr static const bool is_totally_ordered = false;
48 constexpr static const bool preserve_bot = sub_type::preserve_bot;
49 constexpr static const bool preserve_top = sub_type::preserve_top;
 50 // The next properties should be checked more seriously, relying on the sub-domain might be unnecessarily restrictive.
51 constexpr static const bool preserve_join = sub_type::preserve_join;
52 constexpr static const bool preserve_meet = sub_type::preserve_meet;
53 constexpr static const bool injective_concretization = sub_type::injective_concretization;
54 constexpr static const bool preserve_concrete_covers = sub_type::preserve_concrete_covers;
55 constexpr static const char* name = "Tables";
56
57 using table_type = battery::vector<
58 battery::vector<universe_type, allocator_type>,
60 using table_headers = battery::vector<battery::vector<int, allocator_type>, allocator_type>;
61 using table_collection_type = battery::vector<table_type, allocator_type>;
62 using bitset_type = battery::dynamic_bitset<memory_type, allocator_type>;
63
64private:
65 AType atype;
66 AType store_aty;
67 sub_ptr sub;
68
69 table_collection_type tell_tables;
70 table_collection_type ask_tables;
71
72 // See `refine`.
73 battery::vector<size_t, allocator_type> table_idx_to_column;
74 battery::vector<size_t, allocator_type> column_to_table_idx;
75 size_t total_cells;
76
77 // We keep a bitset representation of each variable in the tables.
78 // We perform a reduced product between this representation and the underlying domain.
79 battery::vector<bitset_type, allocator_type> bitset_store;
80 battery::vector<AVar, allocator_type> header2var;
81
 // NOTE(review): this nested struct appears to be unused, work-in-progress code.
 // It refers to names of the enclosing class (`tell_tables`, `bitset_store`,
 // `sub`, `headers`) that a nested type cannot access without an instance, and
 // the class provides its own `crefine`/`lrefine` further below.
 // Confirm against the upstream repository before relying on it.
 82 struct Table {
 83 size_t table_num; // The index to the shared table (either tell_tables or ask_tables).
 84 battery::vector<int, allocator_type> header; // The variables for this table.
 85 battery::vector<bitset_type, allocator_type> cbitsets; // One bitset per column.
 86 bitset_type eliminated_rows; // The eliminated rows in this table.
 87
 // NOTE(review): in the mem-initializers below, `header` names the constructor
 // parameter (which shadows the member) and it has just been moved from, so
 // `header.size()` / `header.get_allocator()` read a moved-from vector —
 // `this->header` was presumably intended. TODO confirm.
 88 Table(size_t table_num, battery::vector<int, allocator_type>&& header)
 89 : table_num(table_num)
 90 , header(std::move(header))
 91 , cbitsets(header.size(), header.get_allocator())
 92 , eliminated_rows(header.get_allocator()) // at start no row is eliminated.
 93 {}
 94
 95 /** We have one refine operator per column in the table.
 96 * This operator union all the values of the active rows, and join the result in the bitset store.
 97 */
 // NOTE(review): the body below (lines 103-109) appears truncated/garbled in
 // this rendering (`u` and `col` are not declared here and several expressions
 // dangle); compare with the class-level `crefine` later in the file.
 98 template <class Mem>
 99 CUDA void crefine(size_t i, BInc<Mem>& has_changed) {
 100 cbitsets[i].reset();
 101 for(int j = 0; j < tell_tables[table_num].size(); ++j) {
 102 if(!eliminated_rows.test(j)) {
 103 tell_tables[table_num][j][i].value()
 104 u.dtell(convert<IKind::TELL>());
 105 }
 106 }
 107 bitset_store[header[i]]
 108 // Reduced product.
 109 sub->tell(headers[table_num][col], u, has_changed);
 110 }
 111
 // NOTE(review): here `eliminated_rows` is a single bitset but is indexed as
 // `eliminated_rows[table_num]` — this mirrors the class-level `lrefine` and
 // looks like a copy-paste leftover. TODO confirm.
 112 template <class Mem>
 113 CUDA void lrefine(size_t table_num, size_t row, size_t col, BInc<Mem>& has_changed)
 114 {
 115 if(!eliminated_rows[table_num].test(row))
 116 {
 117 if(join(convert<IKind::TELL>(tell_tables[table_num][row][col]), sub->project(headers[table_num][col])).is_top()) {
 118 eliminated_rows[table_num].set(row);
 119 has_changed.tell_top();
 120 }
 121 }
 122 }
 123
 124
 125 };
126
127public:
 /** The tell format of this domain: the tell of the sub-domain plus the new
 * tables to add (their headers, tell tables and ask tables). */
 128 template <class Alloc>
 129 struct tell_type {
 130 using allocator_type = Alloc;
 131
 // The part of the formula delegated to the sub-domain.
 132 typename A::template tell_type<Alloc> sub;
 133
 // One header per new table: the variables constrained by that table's columns.
 134 battery::vector<battery::vector<AVar, Alloc>, Alloc> headers;
 135
 // One tell table per header: `tell_tables[i][j][k]` is row `j`, column `k` of table `i`.
 136 battery::vector<battery::vector<
 137 battery::vector<universe_type, Alloc>,
 138 Alloc>, Alloc> tell_tables;
 139
 // The ask counterparts of `tell_tables`, used to detect entailment.
 140 battery::vector<battery::vector<
 141 battery::vector<universe_type, Alloc>,
 142 Alloc>, Alloc> ask_tables;
 143
 144 CUDA tell_type(const Alloc& alloc = Alloc{})
 145 : sub(alloc)
 146 , headers(alloc)
 147 , tell_tables(alloc)
 148 , ask_tables(alloc)
 149 {}
 150 tell_type(const tell_type&) = default;
 151 tell_type(tell_type&&) = default;
 // NOTE(review): line 152 (presumably the defaulted move-assignment, per the
 // member index at the bottom of the page) is missing from this rendering.
 153 tell_type& operator=(const tell_type&) = default;
 154
 // Allocator-extended copy from the `tell_type` of another Tables instantiation.
 155 template <class TableTellType>
 156 CUDA NI tell_type(const TableTellType& other, const Alloc& alloc = Alloc{})
 157 : sub(other.sub, alloc)
 158 , headers(other.headers, alloc)
 159 , tell_tables(other.tell_tables, alloc)
 160 , ask_tables(other.ask_tables, alloc)
 161 {}
 162
 // NOTE(review): line 163 (`CUDA allocator_type get_allocator() const {`, per
 // the member index) is missing from this rendering.
 164 return headers.get_allocator();
 165 }
 166
 167 template <class Alloc2>
 168 friend class tell_type;
 169 };
 170
 /** The ask format: like `tell_type` but without the tell tables. */
 171 template <class Alloc>
 172 struct ask_type {
 173 using allocator_type = Alloc;
 174
 175 typename A::template ask_type<Alloc> sub;
 176 battery::vector<battery::vector<AVar, Alloc>, Alloc> headers;
 177 battery::vector<battery::vector<
 178 battery::vector<universe_type, Alloc>,
 179 Alloc>, Alloc> ask_tables;
 180
 181 CUDA ask_type(const Alloc& alloc = Alloc{})
 182 : sub(alloc)
 183 , headers(alloc)
 184 , ask_tables(alloc)
 185 {}
 186 ask_type(const ask_type&) = default;
 187 ask_type(ask_type&&) = default;
 // NOTE(review): line 188 (presumably the defaulted move-assignment) is
 // missing from this rendering.
 189 ask_type& operator=(const ask_type&) = default;
 190
 191 template <class TableAskType>
 192 CUDA NI ask_type(const TableAskType& other, const Alloc& alloc = Alloc{})
 193 : sub(other.sub, alloc)
 194 , headers(other.headers, alloc)
 195 , ask_tables(other.ask_tables, alloc)
 196 {}
 197
 // NOTE(review): line 198 (`CUDA allocator_type get_allocator() const {`) is
 // missing from this rendering.
 199 return headers.get_allocator();
 200 }
 201
 202 template <class Alloc2>
 203 friend class ask_type;
 204 };
205
206 template <class A2, class U2, class Alloc2>
207 friend class Tables;
208
209public:
 /** Main constructor.
 * @param uid the abstract type of this domain.
 * @param store_aty the abstract type of the variable store in which the table
 * variables are interpreted (used by `avar_of` during interpretation).
 * @param sub the sub-domain on which the reduced product is performed.
 * NOTE(review): the initializer list references members `headers` and
 * `eliminated_rows` that are not among the fields visible in this rendering —
 * presumably declared on lines dropped by the listing. TODO confirm.
 */
 210 CUDA Tables(AType uid, AType store_aty, sub_ptr sub, const allocator_type& alloc = allocator_type())
 211 : atype(uid)
 212 , store_aty(store_aty)
 213 , sub(std::move(sub))
 214 , headers(alloc)
 215 , tell_tables(alloc)
 216 , ask_tables(alloc)
 217 , eliminated_rows(alloc)
 218 , table_idx_to_column({0}, alloc)
 219 , column_to_table_idx(alloc)
 220 , total_cells(0)
 221 , bitset_store(alloc)
 222 , header2var(alloc)
 223 {}
 224
 /** Delegating constructor: the store abstract type defaults to the
 * sub-domain's own abstract type. */
 225 CUDA Tables(AType uid, sub_ptr sub, const allocator_type& alloc = allocator_type())
 226 : Tables(uid, sub->aty(), sub, alloc)
 227 {}
 228
 /** Copy constructor used when cloning a hierarchy of abstract domains
 * (e.g. copying to another memory space); the sub-domain is cloned through
 * `deps` and all containers are rebuilt with the allocator provided by `deps`. */
 229 template<class A2, class U2, class Alloc2, class... Allocators>
 230 CUDA NI Tables(const Tables<A2, U2, Alloc2>& other, AbstractDeps<Allocators...>& deps)
 231 : atype(other.atype)
 232 , store_aty(other.store_aty)
 233 , sub(deps.template clone<sub_type>(other.sub))
 234 , headers(other.headers, deps.template get_allocator<allocator_type>())
 235 , tell_tables(other.tell_tables, deps.template get_allocator<allocator_type>())
 236 , ask_tables(other.ask_tables, deps.template get_allocator<allocator_type>())
 237 , eliminated_rows(other.eliminated_rows, deps.template get_allocator<allocator_type>())
 238 , table_idx_to_column(other.table_idx_to_column, deps.template get_allocator<allocator_type>())
 239 , column_to_table_idx(other.column_to_table_idx, deps.template get_allocator<allocator_type>())
 240 , total_cells(other.total_cells)
 241 , bitset_store(other.bitset_store, deps.template get_allocator<allocator_type>())
 242 , header2var(other.header2var, deps.template get_allocator<allocator_type>())
 243 {}
244
 /** Returns the abstract type identifier of this domain. */
 245 CUDA AType aty() const {
 246 return atype;
 247 }
 248
 // NOTE(review): line 249 (`CUDA allocator_type get_allocator() const {`, per
 // the member index) is missing from this rendering.
 250 return headers.get_allocator();
 251 }
 252
 /** Returns the (shared) pointer to the sub-domain. */
 253 CUDA sub_ptr subdomain() const {
 254 return sub;
 255 }
 256
 /** Bottom iff no table has been added and the sub-domain is bottom. */
 257 CUDA local::BDec is_bot() const {
 258 return tell_tables.size() == 0 && sub->is_bot();
 259 }
260
261 CUDA local::BInc is_top() const {
262 for(int i = 0; i < eliminated_rows.size(); ++i) {
263 if(eliminated_rows[i].count() == tell_tables[i].size()) {
264 return true;
265 }
266 }
267 return sub->is_top();
268 }
269
270 CUDA static this_type bot(AType atype = UNTYPED,
271 AType atype_sub = UNTYPED,
272 const allocator_type& alloc = allocator_type(),
273 const sub_allocator_type& sub_alloc = sub_allocator_type())
274 {
275 return Tables{atype, battery::allocate_shared<sub_type>(alloc, sub_type::bot(atype_sub, sub_alloc)), alloc};
276 }
277
 278 /** A special symbolic element representing top. */
 279 CUDA static this_type top(AType atype = UNTYPED,
 280 AType atype_sub = UNTYPED,
 281 const allocator_type& alloc = allocator_type(),
 282 const sub_allocator_type& sub_alloc = sub_allocator_type())
 283 {
 284 return Tables{atype, battery::allocate_shared<sub_type>(sub_alloc, sub_type::top(atype_sub, sub_alloc)), alloc};
 285 }
 286
 /** Bottom element with abstract types freshly obtained from `env`.
 * NOTE(review): `extends_abstract_dom()` is called twice, once for the
 * sub-domain and once for this domain — presumably each call registers a new
 * abstract type; confirm against `VarEnv`'s documentation. */
 287 template <class Env>
 288 CUDA static this_type bot(Env& env,
 289 const allocator_type& alloc = allocator_type(),
 290 const sub_allocator_type& sub_alloc = sub_allocator_type())
 291 {
 292 AType atype_sub = env.extends_abstract_dom();
 293 AType atype = env.extends_abstract_dom();
 294 return bot(atype, atype_sub, alloc, sub_alloc);
 295 }
 296
 /** Top element with abstract types freshly obtained from `env` (same scheme as `bot` above). */
 297 template <class Env>
 298 CUDA static this_type top(Env& env,
 299 const allocator_type& alloc = allocator_type(),
 300 const sub_allocator_type& sub_alloc = sub_allocator_type())
 301 {
 302 AType atype_sub = env.extends_abstract_dom();
 303 AType atype = env.extends_abstract_dom();
 304 return top(atype, atype_sub, alloc, sub_alloc);
 305 }
306
 /** Snapshot of this domain: the sub-domain snapshot, the number of tables,
 * the total number of cells, and a copy of the bitset store, enough for
 * `restore` to rollback.
 * NOTE(review): several lines of this struct (308, 310-312, 314-318, 330-331)
 * are missing from this rendering — per the member index they contain the
 * `struct snapshot_type` header, the members `sub_snap`, `num_tables`,
 * `total_cells`, the defaulted copy/move operations, and part of the second
 * constructor's initializer list.
 */
 307 template <class Alloc2>
 309 using sub_snap_type = sub_type::template snapshot_type<Alloc2>;
 313 battery::vector<bitset_type, Alloc2> bitset_store;
 314
 319
 // Allocator-extended copy from a snapshot with a different allocator.
 320 template <class SnapshotType>
 321 CUDA snapshot_type(const SnapshotType& other, const Alloc2& alloc = Alloc2())
 322 : sub_snap(other.sub_snap, alloc)
 323 , num_tables(other.num_tables)
 324 , total_cells(other.total_cells)
 325 , bitset_store(other.bitset_store, alloc)
 326 {}
 327
 328 CUDA snapshot_type(sub_snap_type&& sub_snap, size_t num_tables, size_t total_cells, const battery::vector<bitset_type, allocator_type>& bitset_store, const Alloc2& alloc = Alloc2())
 329 : sub_snap(std::move(sub_snap))
 332 , bitset_store(bitset_store, alloc)
 333 {}
 334 };
 335
 /** Creates a snapshot of the current state (sub-domain snapshot, number of
 * tables, total cells and bitset store). */
 336 template <class Alloc2 = allocator_type>
 337 CUDA snapshot_type<Alloc2> snapshot(const Alloc2& alloc = Alloc2()) const {
 338 return snapshot_type<Alloc2>(sub->snapshot(alloc), headers.size(), total_cells, bitset_store, alloc);
 339 }
 340
 /** Rollbacks to the state of `snap`: tables added after the snapshot are
 * dropped, the bookkeeping vectors are resized accordingly, and the bitset
 * store and sub-domain are restored.
 * NOTE(review): `eliminated_rows[i].reset()` clears the elimination bits
 * entirely rather than restoring the snapshot's bits; rows are presumably
 * re-eliminated by `lrefine` on the next fixpoint — confirm this is intended.
 */
 341 template <class Alloc2>
 342 CUDA void restore(const snapshot_type<Alloc2>& snap) {
 343 sub->restore(snap.sub_snap);
 344 total_cells = snap.total_cells;
 345 table_idx_to_column.resize(snap.num_tables + 1);
 346 headers.resize(snap.num_tables);
 347 column_to_table_idx.resize(table_idx_to_column.back());
 348 tell_tables.resize(snap.num_tables);
 349 ask_tables.resize(snap.num_tables);
 350 eliminated_rows.resize(snap.num_tables);
 351 for(int i = 0; i < eliminated_rows.size(); ++i) {
 352 eliminated_rows[i].reset();
 353 }
 354 bitset_store.resize(snap.bitset_store.size());
 355 header2var.resize(snap.bitset_store.size());
 356 for(int i = 0; i < bitset_store.size(); ++i) {
 357 bitset_store[i].resize(snap.bitset_store[i].size());
 358 bitset_store[i] = snap.bitset_store[i];
 359 }
 360 }
361
362 template <class F>
363 CUDA void flatten_and(const F& f, typename F::Sequence& conjuncts) const {
364 if(f.is(F::Seq) && f.sig() == AND) {
365 for(int i = 0; i < f.seq().size(); ++i) {
366 flatten_and(f.seq(i), conjuncts);
367 }
368 }
369 else {
370 conjuncts.push_back(f);
371 }
372 }
373
374 template <class F>
375 CUDA void flatten_or(const F& f, typename F::Sequence& disjuncts) const {
376 if(f.is(F::Seq) && f.sig() == OR) {
377 for(int i = 0; i < f.seq().size(); ++i) {
378 flatten_or(f.seq(i), disjuncts);
379 }
380 }
381 else {
382 typename F::Sequence conjuncts{disjuncts.get_allocator()};
383 flatten_and(f, conjuncts);
384 if(conjuncts.size() > 1) {
385 disjuncts.push_back(F::make_nary(AND, std::move(conjuncts)));
386 }
387 else {
388 disjuncts.push_back(std::move(conjuncts[0]));
389 }
390 }
391 }
392
393 template <class F>
394 CUDA F flatten(const F& f, const typename F::allocator_type& alloc) const {
395 typename F::Sequence disjuncts{alloc};
396 flatten_or(f, disjuncts);
397 if(disjuncts.size() > 1) {
398 return F::make_nary(OR, std::move(disjuncts));
399 }
400 else {
401 return std::move(disjuncts[0]);
402 }
403 }
404
 /** Interprets a single cell (one unary formula) of a table row.
 * The formula must constrain exactly one variable, declared in the store of
 * abstract type `store_aty`. If the variable is new, a column initialized to
 * bottom is appended to every existing row. The interpreted ask (and, in TELL
 * mode, tell) element is joined into the LAST row of the table — the caller
 * is expected to have pushed the current row already.
 * Returns `false` (with diagnostics) when the cell cannot be interpreted. */
 405 template <IKind kind, bool diagnose = false, class F, class Env, class Alloc>
 406 CUDA NI bool interpret_atom(
 407 battery::vector<AVar, Alloc>& header,
 408 battery::vector<battery::vector<local_universe, Alloc>, Alloc>& tell_table,
 409 battery::vector<battery::vector<local_universe, Alloc>, Alloc>& ask_table,
 410 const F& f, Env& env, IDiagnostics& diagnostics) const
 411 {
 412 if(num_vars(f) != 1) {
 413 RETURN_INTERPRETATION_ERROR("Only unary formulas are supported in the cell of the table.");
 414 }
 415 else {
 416 auto x_opt = var_in(f, env);
 417 if(!x_opt.has_value() || !x_opt->get().avar_of(store_aty).has_value()) {
 418 RETURN_INTERPRETATION_ERROR("Undeclared variable.");
 419 }
 420 AVar x = x_opt->get().avar_of(store_aty).value();
 // Linear scan to find the column of `x` in the header.
 421 int idx = 0;
 422 for(; idx < header.size() && header[idx] != x; ++idx) {}
 423 // If it's a new variable not present in the previous rows, we add it in each row with bottom value.
 424 if(idx == header.size()) {
 425 header.push_back(x);
 426 for(int i = 0; i < tell_table.size(); ++i) {
 // In ASK mode `tell_table` is left untouched (it stays empty).
 427 if constexpr(kind == IKind::TELL) {
 428 tell_table[i].push_back(local_universe::bot());
 429 }
 430 ask_table[i].push_back(local_universe::bot());
 431 }
 432 }
 // Interpret the cell in ASK mode first; in TELL mode, additionally
 // interpret it as a tell element. Both are joined into the last row.
 433 local_universe ask_u{local_universe::bot()};
 434 if(ginterpret_in<IKind::ASK, diagnose>(f, env, ask_u, diagnostics)) {
 435 ask_table.back()[idx].tell(ask_u);
 436 if constexpr(kind == IKind::TELL) {
 437 local_universe tell_u{local_universe::bot()};
 438 if(ginterpret_in<IKind::TELL, diagnose>(f, env, tell_u, diagnostics)) {
 439 tell_table.back()[idx].tell(tell_u);
 440 }
 441 else {
 442 return false;
 443 }
 444 }
 445 }
 446 else {
 447 return false;
 448 }
 449 }
 450 return true;
 451 }
452
453public:
454 template <IKind kind, bool diagnose = false, class F, class Env, class I>
455 CUDA NI bool interpret(const F& f2, Env& env, I& intermediate, IDiagnostics& diagnostics) const {
456 F f = flatten(f2, env.get_allocator());
457 using Alloc = typename I::allocator_type;
458 if(f.is(F::Seq) && f.sig() == OR) {
459 battery::vector<AVar, Alloc> header(intermediate.get_allocator());
460 battery::vector<battery::vector<local_universe, Alloc>, Alloc> tell_table(intermediate.get_allocator());
461 battery::vector<battery::vector<local_universe, Alloc>, Alloc> ask_table(intermediate.get_allocator());
462 for(int i = 0; i < f.seq().size(); ++i) {
463 // Add a row in the table.
464 tell_table.push_back(battery::vector<local_universe, Alloc>(header.size(), local_universe::bot(), intermediate.get_allocator()));
465 ask_table.push_back(battery::vector<local_universe, Alloc>(header.size(), local_universe::bot(), intermediate.get_allocator()));
466 if(f.seq(i).is(F::Seq) && f.seq(i).sig() == AND) {
467 const auto& row = f.seq(i).seq();
468 for(int j = 0; j < row.size(); ++j) {
469 size_t error_ctx = diagnostics.num_suberrors();
470 if(!interpret_atom<kind, diagnose>(header, tell_table, ask_table, row[j], env, diagnostics)) {
471 if(!sub->template interpret<kind, diagnose>(f2, env, intermediate.sub, diagnostics)) {
472 return false;
473 }
474 diagnostics.cut(error_ctx);
475 return true;
476 }
477 }
478 }
479 else {
480 size_t error_ctx = diagnostics.num_suberrors();
481 if(!interpret_atom<kind, diagnose>(header, tell_table, ask_table, f.seq(i), env, diagnostics)) {
482 if(!sub->template interpret<kind, diagnose>(f2, env, intermediate.sub, diagnostics)) {
483 return false;
484 }
485 diagnostics.cut(error_ctx);
486 return true;
487 }
488 }
489 }
490 intermediate.headers.push_back(std::move(header));
491 if constexpr(kind == IKind::TELL) {
492 intermediate.tell_tables.push_back(std::move(tell_table));
493 }
494 intermediate.ask_tables.push_back(std::move(ask_table));
495 return true;
496 }
497 /** tables(N, C, 1D ... table, x1,..,xN, y1,..,yN, ...) where
498 * * N is the number of lines.
499 * * C is the number of columns.
500 * * x1,...,xN is the names of the variables for one table.
501 */
502 else if(f.is(F::ESeq) && f.esig() == "tables") {
503 const auto& tables = f.eseq();
504 int n = tables[0].z();
505 int c = tables[1].z();
506 int num_tables = (tables.size() - 2 - n * c) / c;
507 battery::vector<battery::vector<AVar, Alloc>, Alloc> headers(intermediate.get_allocator());
508 for(int i = 0; i < num_tables; ++i) {
509 headers.push_back(battery::vector<AVar, Alloc>(intermediate.get_allocator()));
510 for(int j = 0; j < c; ++j) {
511 const auto& fvar = tables[2+n*c+i*c+j];
512 if(!fvar.is_variable()) {
513 RETURN_INTERPRETATION_ERROR("Ill-formed predicate `tables(N, C, 1D ... table, x1,..,xN, y1,..,yN, ...)`: expected variable names after the table.");
514 }
515 auto x_opt = var_in(fvar, env);
516 if(!x_opt.has_value() || !x_opt->get().avar_of(store_aty).has_value()) {
517 RETURN_INTERPRETATION_ERROR("Undeclared variable.");
518 }
519 AVar x = x_opt->get().avar_of(store_aty).value();
520 headers.back().push_back(x);
521 }
522 }
523 battery::vector<battery::vector<local_universe, Alloc>, Alloc> tell_table(intermediate.get_allocator());
524 battery::vector<battery::vector<local_universe, Alloc>, Alloc> ask_table(intermediate.get_allocator());
525 for(int i = 0; i < n; ++i) {
526 tell_table.push_back(battery::vector<local_universe, Alloc>(intermediate.get_allocator()));
527 ask_table.push_back(battery::vector<local_universe, Alloc>(intermediate.get_allocator()));
528 for(int j = 0; j < c; ++j) {
529 tell_table[i].push_back(universe_type::bot());
530 ask_table[i].push_back(universe_type::bot());
531 }
532 }
533 for(int i = 0; i < n; ++i) {
534 for(int j = 0; j < c; ++j) {
535 if(!(tables[2+i*c+j].is(F::LV) && tables[2+i*c+j].lv() == "*")) {
536 auto fcell = F::make_binary(F::make_lvar(LVar<typename F::allocator_type>("_")), EQ, tables[2+i*c+j]);
537 local_universe ask_u{local_universe::bot()};
538 if(ginterpret_in<IKind::ASK, diagnose>(fcell, env, ask_u, diagnostics)) {
539 ask_table[i][j].tell(ask_u);
540 if constexpr(kind == IKind::TELL) {
541 local_universe tell_u{local_universe::bot()};
542 if(ginterpret_in<IKind::TELL, diagnose>(fcell, env, tell_u, diagnostics)) {
543 tell_table[i][j].tell(tell_u);
544 }
545 else {
546 return false;
547 }
548 }
549 }
550 else {
551 return false;
552 }
553 }
554 }
555 }
556 intermediate.headers.push_back(std::move(header));
557 if constexpr(kind == IKind::TELL) {
558 intermediate.tell_tables.push_back(std::move(tell_table));
559 }
560 intermediate.ask_tables.push_back(std::move(ask_table));
561 return true;
562 }
563 else {
564 return sub->template interpret<kind, diagnose>(f, env, intermediate.sub, diagnostics);
565 }
566 }
567
 /** Interprets `f` in ASK mode into `ask`.
 * NOTE(review): the `const_cast` assumes ASK interpretation does not modify
 * `env` — confirm against `interpret`'s contract. */
 568 template <bool diagnose = false, class F, class Env, class Alloc>
 569 CUDA NI bool interpret_ask(const F& f, const Env& env, ask_type<Alloc>& ask, IDiagnostics& diagnostics) const {
 570 return interpret<IKind::ASK, diagnose>(f, const_cast<Env&>(env), ask, diagnostics);
 571 }
 572
 /** Interprets `f` in TELL mode into `tell`. */
 573 template <bool diagnose = false, class F, class Env, class Alloc>
 574 CUDA NI bool interpret_tell(const F& f, Env& env, tell_type<Alloc>& tell, IDiagnostics& diagnostics) const {
 575 return interpret<IKind::TELL, diagnose>(f, env, tell, diagnostics);
 576 }
 577
 /** Projects the variable `x` in the sub-domain. */
 578 CUDA const sub_universe_type& operator[](int x) const {
 579 return (*sub)[x];
 580 }
 581
 /** Number of variables in the sub-domain. */
 582 CUDA size_t vars() const {
 583 return sub->vars();
 584 }
 585
 /** Number of tables currently stored in this domain. */
 586 CUDA size_t num_tables() const {
 587 return headers.size();
 588 }
589
590private:
591 template <IKind kind>
592 CUDA sub_local_universe convert(const local_universe& x) const {
593 if constexpr(std::is_same_v<universe_type, sub_universe_type>) {
594 return x;
595 }
596 else {
597 VarEnv<battery::standard_allocator> env;
598 IDiagnostics diagnostics;
599 sub_local_universe v{sub_local_universe::bot()};
600 bool succeed = ginterpret_in<kind>(x.deinterpret(AVar{}, env), env, v, diagnostics);
601 assert(succeed);
602 return v;
603 }
604 }
605
606public:
 /** Adds the tables of `t` to this domain and forwards `t.sub` to the
 * sub-domain. Adding any table sets `has_changed` to top.
 * Each table's columns are registered in the column/table index maps used by
 * `refine` to dispatch its operators. */
 607 template <class Alloc, class Mem>
 608 CUDA this_type& tell(const tell_type<Alloc>& t, BInc<Mem>& has_changed) {
 609 if(t.headers.size() > 0) {
 610 has_changed.tell_top();
 611 }
 612 sub->tell(t.sub, has_changed);
 613 for(int i = 0; i < t.headers.size(); ++i) {
 614 headers.push_back(battery::vector<AVar, allocator_type>(t.headers[i], get_allocator()));
 // NOTE(review): `i` is the table index within THIS `tell_type` only; if
 // `tell` is invoked more than once, the columns of later calls map to the
 // wrong global table (presumably `headers.size() - 1` was intended) —
 // confirm against upstream.
 615 for(int j = 0; j < t.headers[i].size(); ++j) {
 616 column_to_table_idx.push_back(i);
 617 }
 // NOTE(review): `t.tell_tables[i][0]` assumes every table has at least one row.
 618 table_idx_to_column.push_back(table_idx_to_column.back() + t.tell_tables[i][0].size());
 619 tell_tables.push_back(table_type(t.tell_tables[i], get_allocator()));
 620 ask_tables.push_back(table_type(t.ask_tables[i], get_allocator()));
 // A fresh bitset per table: initially no row is eliminated.
 621 eliminated_rows.push_back(bitset_type(tell_tables.back().size(), get_allocator()));
 622 total_cells += tell_tables.back().size() * tell_tables.back()[0].size();
 623 }
 624 return *this;
 625 }
 626
 /** Same as above, discarding the `has_changed` flag.
 * NOTE(review): line 628 (`CUDA this_type& tell(const tell_type<Alloc>& t) {`,
 * per the member index) is missing from this rendering. */
 627 template <class Alloc>
 629 local::BInc has_changed;
 630 return tell(t, has_changed);
 631 }
 632
 /** Tells `dom` to the variable `x` of the sub-domain. */
 633 CUDA this_type& tell(AVar x, const sub_universe_type& dom) {
 634 sub->tell(x, dom);
 635 return *this;
 636 }
 637
 /** Tells `dom` to the variable `x` of the sub-domain, tracking changes. */
 638 template <class Mem>
 639 CUDA this_type& tell(AVar x, const sub_universe_type& dom, BInc<Mem>& has_changed) {
 640 sub->tell(x, dom, has_changed);
 641 return *this;
 642 }
643
644private:
645 template <class Alloc>
646 CUDA local::BInc ask(const battery::vector<battery::vector<AVar, Alloc>, Alloc>& header,
647 const battery::vector<battery::vector<battery::vector<universe_type, Alloc>, Alloc>, Alloc>& ask_tables) const
648 {
649 for(int i = 0; i < ask_tables.size(); ++i) {
650 bool table_entailed = false;
651 for(int j = 0; j < ask_tables[i].size() && !table_entailed; ++j) {
652 bool row_entailed = true;
653 for(int k = 0; k < ask_tables[i][j].size(); ++k) {
654 if(!(sub->project(headers[i][k]) >= convert<IKind::ASK>(ask_tables[i][j][k]))) {
655 row_entailed = false;
656 break;
657 }
658 }
659 if(row_entailed) {
660 table_entailed = true;
661 }
662 }
663 if(!table_entailed) {
664 return false;
665 }
666 }
667 return true;
668 }
669
670public:
 /** The ask `a` is entailed when all its tables are entailed and the
 * sub-domain entails `a.sub`. */
 671 template <class Alloc>
 672 CUDA local::BInc ask(const ask_type<Alloc>& a) const {
 673 return ask(a.headers, a.ask_tables) && sub->ask(a.sub);
 674 }
675
676 template <class Mem>
677 CUDA void crefine(size_t table_num, size_t col, BInc<Mem>& has_changed) {
678 sub_local_universe u{sub_local_universe::top()};
679 for(int j = 0; j < tell_tables[table_num].size(); ++j) {
680 if(!eliminated_rows[table_num].test(j)) {
681 u.dtell(convert<IKind::TELL>(tell_tables[table_num][j][col]));
682 }
683 }
684 sub->tell(headers[table_num][col], u, has_changed);
685 }
686
687 template <class Mem>
688 CUDA void lrefine(size_t table_num, size_t row, size_t col, BInc<Mem>& has_changed)
689 {
690 if(!eliminated_rows[table_num].test(row))
691 {
692 if(join(convert<IKind::TELL>(tell_tables[table_num][row][col]), sub->project(headers[table_num][col])).is_top()) {
693 eliminated_rows[table_num].set(row);
694 has_changed.tell_top();
695 }
696 }
697 }
698
 /** Total number of refinement operators: the sub-domain's, plus one `crefine`
 * per column and one `lrefine` per cell over all tables. */
 699 CUDA size_t num_refinements() const {
 700 return
 701 sub->num_refinements() +
 702 column_to_table_idx.size() + // number of crefine (one per column).
 703 total_cells; // number of lrefine (one per cell).
 704 }
 705
 /** Dispatches the `i`-th refinement operator: first the sub-domain's
 * operators, then the column operators (`crefine`), then the cell operators
 * (`lrefine`). For `crefine`, `column_to_table_idx[i]` gives the table and
 * `i - table_idx_to_column[table]` the local column. For `lrefine`, the table
 * is found by subtracting each table's cell count, then `i` is decomposed
 * into row and column by division/modulo on the table's width. */
 706 template <class Mem>
 707 CUDA void refine(size_t i, BInc<Mem>& has_changed) {
 708 assert(i < num_refinements());
 709 if(i < sub->num_refinements()) {
 710 sub->refine(i, has_changed);
 711 }
 712 else {
 713 i -= sub->num_refinements();
 714 if(i < column_to_table_idx.size()) {
 715 crefine(column_to_table_idx[i], i - table_idx_to_column[column_to_table_idx[i]], has_changed);
 716 }
 717 else {
 718 i -= column_to_table_idx.size();
 719 size_t table_num = 0;
 720 bool unfinished = true;
 721 // This loop computes the table number of the cell `i`, we avoid stopping the loop earlier to avoid thread divergence.
 722 for(int j = 0; j < tell_tables.size(); ++j) {
 // NOTE(review): assumes every table has at least one row (`tell_tables[j][0]`).
 723 size_t dim_table = tell_tables[j].size() * tell_tables[j][0].size();
 // Once `i` falls inside table `j`, `unfinished` sticks to false and the
 // remaining iterations are no-ops.
 724 unfinished &= (i >= dim_table);
 725 i -= (unfinished ? dim_table : 0);
 726 table_num += (unfinished ? 1 : 0);
 727 }
 728 lrefine(table_num, i / tell_tables[table_num][0].size(), i % tell_tables[table_num][0].size(), has_changed);
 729 }
 730 }
 731 }
732
 /** Extractable when every table is entailed (all remaining rows hold in the
 * sub-domain) and the sub-domain itself is extractable. */
 733 template <class ExtractionStrategy = NonAtomicExtraction>
 734 CUDA bool is_extractable(const ExtractionStrategy& strategy = ExtractionStrategy()) const {
 735 // Check all remaining row are entailed.
 736 return ask(headers, ask_tables) && sub->is_extractable(strategy);
 737 }
 738
 739 /** Extract an under-approximation if the last node popped \f$ a \f$ is an under-approximation.
 740 * If `B` is a search tree, the under-approximation consists in a search tree \f$ \{a\} \f$ with a single node, in that case, `ua` must be different from `top`. */
 741 template <class B>
 742 CUDA void extract(B& ua) const {
 // When the target is itself a table domain, extract into its sub-domain.
 743 if constexpr(impl::is_table_like<B>::value) {
 744 sub->extract(*ua.sub);
 745 }
 746 else {
 747 sub->extract(ua);
 748 }
 749 }
 750
 /** Projects the variable `x` in the sub-domain. */
 751 CUDA sub_universe_type project(AVar x) const {
 752 return sub->project(x);
 753 }
 754
 /** Deinterprets this domain to a formula: the sub-domain's formula conjoined
 * with, for each table, a disjunction over its non-eliminated rows, where
 * each row is the conjunction of its not-yet-entailed cells. */
 755 template<class Env>
 756 CUDA NI TFormula<typename Env::allocator_type> deinterpret(const Env& env) const {
 757 using F = TFormula<typename Env::allocator_type>;
 758 F sub_f = sub->deinterpret(env);
 759 typename F::Sequence seq{env.get_allocator()};
 // Reuse the sub-formula's top-level conjunction when there is one, to avoid
 // nesting ANDs.
 760 if(sub_f.is(F::Seq) && sub_f.sig() == AND) {
 761 seq = std::move(sub_f.seq());
 762 }
 763 else {
 764 seq.push_back(std::move(sub_f));
 765 }
 766 for(int i = 0; i < headers.size(); ++i) {
 767 typename F::Sequence disjuncts{env.get_allocator()};
 768 for(int j = 0; j < tell_tables[i].size(); ++j) {
 769 if(!eliminated_rows[i].test(j)) {
 770 typename F::Sequence conjuncts{env.get_allocator()};
 771 for(int k = 0; k < tell_tables[i][j].size(); ++k) {
 // Skip cells already entailed by the sub-domain's current projection.
 772 if(!(sub->project(headers[i][k]) >= convert<IKind::ASK>(ask_tables[i][j][k]))) {
 773 conjuncts.push_back(tell_tables[i][j][k].deinterpret(headers[i][k], env));
 774 }
 775 }
 776 disjuncts.push_back(F::make_nary(AND, std::move(conjuncts), aty()));
 777 }
 778 }
 779 seq.push_back(F::make_nary(OR, std::move(disjuncts), aty()));
 780 }
 781 return F::make_nary(AND, std::move(seq));
 782 }
783};
784
785}
786
787#endif
Definition tables.hpp:32
typename A::allocator_type sub_allocator_type
Definition tables.hpp:35
battery::vector< table_type, allocator_type > table_collection_type
Definition tables.hpp:61
CUDA NI bool interpret_ask(const F &f, const Env &env, ask_type< Alloc > &ask, IDiagnostics &diagnostics) const
Definition tables.hpp:569
static constexpr const bool preserve_meet
Definition tables.hpp:52
CUDA bool is_extractable(const ExtractionStrategy &strategy=ExtractionStrategy()) const
Definition tables.hpp:734
battery::vector< battery::vector< int, allocator_type >, allocator_type > table_headers
Definition tables.hpp:60
CUDA void lrefine(size_t table_num, size_t row, size_t col, BInc< Mem > &has_changed)
Definition tables.hpp:688
CUDA allocator_type get_allocator() const
Definition tables.hpp:249
CUDA NI Tables(const Tables< A2, U2, Alloc2 > &other, AbstractDeps< Allocators... > &deps)
Definition tables.hpp:230
CUDA NI bool interpret_tell(const F &f, Env &env, tell_type< Alloc > &tell, IDiagnostics &diagnostics) const
Definition tables.hpp:574
static constexpr const bool is_totally_ordered
Definition tables.hpp:47
CUDA local::BInc is_top() const
Definition tables.hpp:261
CUDA NI bool interpret(const F &f2, Env &env, I &intermediate, IDiagnostics &diagnostics) const
Definition tables.hpp:455
CUDA void refine(size_t i, BInc< Mem > &has_changed)
Definition tables.hpp:707
static constexpr const char * name
Definition tables.hpp:55
CUDA this_type & tell(AVar x, const sub_universe_type &dom)
Definition tables.hpp:633
CUDA NI bool interpret_atom(battery::vector< AVar, Alloc > &header, battery::vector< battery::vector< local_universe, Alloc >, Alloc > &tell_table, battery::vector< battery::vector< local_universe, Alloc >, Alloc > &ask_table, const F &f, Env &env, IDiagnostics &diagnostics) const
Definition tables.hpp:406
static constexpr const bool preserve_join
Definition tables.hpp:51
static constexpr const bool preserve_concrete_covers
Definition tables.hpp:54
CUDA size_t vars() const
Definition tables.hpp:582
typename universe_type::local_type local_universe
Definition tables.hpp:37
CUDA snapshot_type< Alloc2 > snapshot(const Alloc2 &alloc=Alloc2()) const
Definition tables.hpp:337
static constexpr const bool preserve_top
Definition tables.hpp:49
CUDA void extract(B &ua) const
Definition tables.hpp:742
battery::vector< battery::vector< universe_type, allocator_type >, allocator_type > table_type
Definition tables.hpp:57
static CUDA this_type bot(AType atype=UNTYPED, AType atype_sub=UNTYPED, const allocator_type &alloc=allocator_type(), const sub_allocator_type &sub_alloc=sub_allocator_type())
Definition tables.hpp:270
CUDA local::BInc ask(const ask_type< Alloc > &a) const
Definition tables.hpp:672
typename sub_universe_type::local_type sub_local_universe
Definition tables.hpp:39
CUDA Tables(AType uid, sub_ptr sub, const allocator_type &alloc=allocator_type())
Definition tables.hpp:225
CUDA void flatten_or(const F &f, typename F::Sequence &disjuncts) const
Definition tables.hpp:375
CUDA this_type & tell(AVar x, const sub_universe_type &dom, BInc< Mem > &has_changed)
Definition tables.hpp:639
static constexpr const bool sequential
Definition tables.hpp:46
CUDA void crefine(size_t table_num, size_t col, BInc< Mem > &has_changed)
Definition tables.hpp:677
CUDA void restore(const snapshot_type< Alloc2 > &snap)
Definition tables.hpp:342
static constexpr const bool preserve_bot
Definition tables.hpp:48
CUDA AType aty() const
Definition tables.hpp:245
abstract_ptr< sub_type > sub_ptr
Definition tables.hpp:42
CUDA size_t num_tables() const
Definition tables.hpp:586
static CUDA this_type bot(Env &env, const allocator_type &alloc=allocator_type(), const sub_allocator_type &sub_alloc=sub_allocator_type())
Definition tables.hpp:288
CUDA this_type & tell(const tell_type< Alloc > &t)
Definition tables.hpp:628
Allocator allocator_type
Definition tables.hpp:34
CUDA Tables(AType uid, AType store_aty, sub_ptr sub, const allocator_type &alloc=allocator_type())
Definition tables.hpp:210
typename A::universe_type sub_universe_type
Definition tables.hpp:38
CUDA void flatten_and(const F &f, typename F::Sequence &conjuncts) const
Definition tables.hpp:363
CUDA size_t num_refinements() const
Definition tables.hpp:699
A sub_type
Definition tables.hpp:41
CUDA F flatten(const F &f, const typename F::allocator_type &alloc) const
Definition tables.hpp:394
U universe_type
Definition tables.hpp:36
CUDA NI TFormula< typename Env::allocator_type > deinterpret(const Env &env) const
Definition tables.hpp:756
CUDA sub_universe_type project(AVar x) const
Definition tables.hpp:751
CUDA this_type & tell(const tell_type< Alloc > &t, BInc< Mem > &has_changed)
Definition tables.hpp:608
static constexpr const bool is_abstract_universe
Definition tables.hpp:45
CUDA local::BDec is_bot() const
Definition tables.hpp:257
CUDA sub_ptr subdomain() const
Definition tables.hpp:253
typename universe_type::memory_type memory_type
Definition tables.hpp:40
static CUDA this_type top(AType atype=UNTYPED, AType atype_sub=UNTYPED, const allocator_type &alloc=allocator_type(), const sub_allocator_type &sub_alloc=sub_allocator_type())
Definition tables.hpp:279
static constexpr const bool injective_concretization
Definition tables.hpp:53
battery::dynamic_bitset< memory_type, allocator_type > bitset_type
Definition tables.hpp:62
static CUDA this_type top(Env &env, const allocator_type &alloc=allocator_type(), const sub_allocator_type &sub_alloc=sub_allocator_type())
Definition tables.hpp:298
CUDA const sub_universe_type & operator[](int x) const
Definition tables.hpp:578
Definition bab.hpp:13
Definition tables.hpp:172
CUDA ask_type(const Alloc &alloc=Alloc{})
Definition tables.hpp:181
ask_type(ask_type &&)=default
CUDA allocator_type get_allocator() const
Definition tables.hpp:198
battery::vector< battery::vector< battery::vector< universe_type, Alloc >, Alloc >, Alloc > ask_tables
Definition tables.hpp:179
ask_type(const ask_type &)=default
CUDA NI ask_type(const TableAskType &other, const Alloc &alloc=Alloc{})
Definition tables.hpp:192
ask_type & operator=(ask_type &&)=default
A::template ask_type< Alloc > sub
Definition tables.hpp:175
Alloc allocator_type
Definition tables.hpp:173
battery::vector< battery::vector< AVar, Alloc >, Alloc > headers
Definition tables.hpp:176
ask_type & operator=(const ask_type &)=default
Definition tables.hpp:308
snapshot_type(const snapshot_type< Alloc2 > &)=default
sub_snap_type sub_snap
Definition tables.hpp:310
size_t num_tables
Definition tables.hpp:311
battery::vector< bitset_type, Alloc2 > bitset_store
Definition tables.hpp:313
snapshot_type< Alloc2 > & operator=(const snapshot_type< Alloc2 > &)=default
sub_type::template snapshot_type< Alloc2 > sub_snap_type
Definition tables.hpp:309
CUDA snapshot_type(const SnapshotType &other, const Alloc2 &alloc=Alloc2())
Definition tables.hpp:321
snapshot_type(snapshot_type< Alloc2 > &&)=default
snapshot_type< Alloc2 > & operator=(snapshot_type< Alloc2 > &&)=default
CUDA snapshot_type(sub_snap_type &&sub_snap, size_t num_tables, size_t total_cells, const battery::vector< bitset_type, allocator_type > &bitset_store, const Alloc2 &alloc=Alloc2())
Definition tables.hpp:328
size_t total_cells
Definition tables.hpp:312
Definition tables.hpp:129
tell_type(const tell_type &)=default
battery::vector< battery::vector< battery::vector< universe_type, Alloc >, Alloc >, Alloc > tell_tables
Definition tables.hpp:138
CUDA tell_type(const Alloc &alloc=Alloc{})
Definition tables.hpp:144
battery::vector< battery::vector< AVar, Alloc >, Alloc > headers
Definition tables.hpp:134
Alloc allocator_type
Definition tables.hpp:130
tell_type(tell_type &&)=default
CUDA allocator_type get_allocator() const
Definition tables.hpp:163
battery::vector< battery::vector< battery::vector< universe_type, Alloc >, Alloc >, Alloc > ask_tables
Definition tables.hpp:142
tell_type & operator=(const tell_type &)=default
CUDA NI tell_type(const TableTellType &other, const Alloc &alloc=Alloc{})
Definition tables.hpp:156
tell_type & operator=(tell_type &&)=default
A::template tell_type< Alloc > sub
Definition tables.hpp:132