Z3Solver.cpp
1//== Z3Solver.cpp -----------------------------------------------*- C++ -*--==//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "llvm/Config/config.h"
10#include "llvm/Support/SMTAPI.h"
11
12using namespace llvm;
13
14#if LLVM_WITH_Z3
15
16#include "llvm/ADT/SmallString.h"
17#include "llvm/ADT/Twine.h"
18
19#include <set>
20
21#include <z3.h>
22
23namespace {
24
25/// Configuration class for Z3
26class Z3Config {
27 friend class Z3Context;
28
29 Z3_config Config;
30
31public:
32 Z3Config() : Config(Z3_mk_config()) {
33 // Enable model finding
34 Z3_set_param_value(Config, "model", "true");
35 // Disable proof generation
36 Z3_set_param_value(Config, "proof", "false");
37 // Set timeout to 15000ms = 15s
38 Z3_set_param_value(Config, "timeout", "15000");
39 }
40
41 ~Z3Config() { Z3_del_config(Config); }
42}; // end class Z3Config
43
44// Function used to report errors
45void Z3ErrorHandler(Z3_context Context, Z3_error_code Error) {
46 llvm::report_fatal_error("Z3 error: " +
47 llvm::Twine(Z3_get_error_msg(Context, Error)));
48}
49
50/// Wrapper for Z3 context
51class Z3Context {
52public:
53 Z3_context Context;
54
55 Z3Context() {
56 Context = Z3_mk_context_rc(Z3Config().Config);
57 // The error function is set here because the context is the first object
58 // created by the backend
59 Z3_set_error_handler(Context, Z3ErrorHandler);
60 }
61
62 virtual ~Z3Context() {
63 Z3_del_context(Context);
64 Context = nullptr;
65 }
66}; // end class Z3Context
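// Editorial note (not in the upstream file): Z3_mk_context_rc creates a
// context with manual reference counting, which is why every wrapper below
// (Z3Sort, Z3Expr, Z3Model, Z3Solver) pairs the Z3 inc_ref call in its
// constructor with the matching dec_ref in its destructor.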
67
68/// Wrapper for Z3 Sort
69class Z3Sort : public SMTSort {
70 friend class Z3Solver;
71
72 Z3Context &Context;
73
74 Z3_sort Sort;
75
76public:
77 /// Construct a Z3Sort from a Z3_sort, mainly used by make_shared.
78 Z3Sort(Z3Context &C, Z3_sort ZS) : Context(C), Sort(ZS) {
79 Z3_inc_ref(Context.Context, reinterpret_cast<Z3_ast>(Sort));
80 }
81
82 /// Override implicit copy constructor for correct reference counting.
83 Z3Sort(const Z3Sort &Other) : Context(Other.Context), Sort(Other.Sort) {
84 Z3_inc_ref(Context.Context, reinterpret_cast<Z3_ast>(Sort));
85 }
86
87 /// Override implicit copy assignment operator for correct reference
88 /// counting.
89 Z3Sort &operator=(const Z3Sort &Other) {
90 Z3_inc_ref(Context.Context, reinterpret_cast<Z3_ast>(Other.Sort));
91 Z3_dec_ref(Context.Context, reinterpret_cast<Z3_ast>(Sort));
92 Sort = Other.Sort;
93 return *this;
94 }
95
96 Z3Sort(Z3Sort &&Other) = delete;
97 Z3Sort &operator=(Z3Sort &&Other) = delete;
98
99 ~Z3Sort() {
100 if (Sort)
101 Z3_dec_ref(Context.Context, reinterpret_cast<Z3_ast>(Sort));
102 }
103
104 void Profile(llvm::FoldingSetNodeID &ID) const override {
105 ID.AddInteger(
106 Z3_get_ast_id(Context.Context, reinterpret_cast<Z3_ast>(Sort)));
107 }
108
109 bool isBitvectorSortImpl() const override {
110 return (Z3_get_sort_kind(Context.Context, Sort) == Z3_BV_SORT);
111 }
112
113 bool isFloatSortImpl() const override {
114 return (Z3_get_sort_kind(Context.Context, Sort) == Z3_FLOATING_POINT_SORT);
115 }
116
117 bool isBooleanSortImpl() const override {
118 return (Z3_get_sort_kind(Context.Context, Sort) == Z3_BOOL_SORT);
119 }
120
121 unsigned getBitvectorSortSizeImpl() const override {
122 return Z3_get_bv_sort_size(Context.Context, Sort);
123 }
124
125 unsigned getFloatSortSizeImpl() const override {
126 return Z3_fpa_get_ebits(Context.Context, Sort) +
127 Z3_fpa_get_sbits(Context.Context, Sort);
128 }
129
130 bool equal_to(SMTSort const &Other) const override {
131 return Z3_is_eq_sort(Context.Context, Sort,
132 static_cast<const Z3Sort &>(Other).Sort);
133 }
134
135 void print(raw_ostream &OS) const override {
136 OS << Z3_sort_to_string(Context.Context, Sort);
137 }
138}; // end class Z3Sort
139
140static const Z3Sort &toZ3Sort(const SMTSort &S) {
141 return static_cast<const Z3Sort &>(S);
142}
143
144class Z3Expr : public SMTExpr {
145 friend class Z3Solver;
146
147 Z3Context &Context;
148
149 Z3_ast AST;
150
151public:
152 Z3Expr(Z3Context &C, Z3_ast ZA) : SMTExpr(), Context(C), AST(ZA) {
153 Z3_inc_ref(Context.Context, AST);
154 }
155
156 /// Override implicit copy constructor for correct reference counting.
157 Z3Expr(const Z3Expr &Copy) : SMTExpr(), Context(Copy.Context), AST(Copy.AST) {
158 Z3_inc_ref(Context.Context, AST);
159 }
160
161 /// Override implicit copy assignment operator for correct reference
162 /// counting.
163 Z3Expr &operator=(const Z3Expr &Other) {
164 Z3_inc_ref(Context.Context, Other.AST);
165 Z3_dec_ref(Context.Context, AST);
166 AST = Other.AST;
167 return *this;
168 }
169
170 Z3Expr(Z3Expr &&Other) = delete;
171 Z3Expr &operator=(Z3Expr &&Other) = delete;
172
173 ~Z3Expr() {
174 if (AST)
175 Z3_dec_ref(Context.Context, AST);
176 }
177
178 void Profile(llvm::FoldingSetNodeID &ID) const override {
179 ID.AddInteger(Z3_get_ast_id(Context.Context, AST));
180 }
181
182 /// Comparison of AST equality, not model equivalence.
183 bool equal_to(SMTExpr const &Other) const override {
184 assert(Z3_is_eq_sort(Context.Context, Z3_get_sort(Context.Context, AST),
185 Z3_get_sort(Context.Context,
186 static_cast<const Z3Expr &>(Other).AST)) &&
187 "AST's must have the same sort");
188 return Z3_is_eq_ast(Context.Context, AST,
189 static_cast<const Z3Expr &>(Other).AST);
190 }
191
192 void print(raw_ostream &OS) const override {
193 OS << Z3_ast_to_string(Context.Context, AST);
194 }
195}; // end class Z3Expr
196
197static const Z3Expr &toZ3Expr(const SMTExpr &E) {
198 return static_cast<const Z3Expr &>(E);
199}
200
201class Z3Model {
202 friend class Z3Solver;
203
204 Z3Context &Context;
205
206 Z3_model Model;
207
208public:
209 Z3Model(Z3Context &C, Z3_model ZM) : Context(C), Model(ZM) {
210 Z3_model_inc_ref(Context.Context, Model);
211 }
212
213 Z3Model(const Z3Model &Other) = delete;
214 Z3Model(Z3Model &&Other) = delete;
215 Z3Model &operator=(Z3Model &Other) = delete;
216 Z3Model &operator=(Z3Model &&Other) = delete;
217
218 ~Z3Model() {
219 if (Model)
220 Z3_model_dec_ref(Context.Context, Model);
221 }
222
223 void print(raw_ostream &OS) const {
224 OS << Z3_model_to_string(Context.Context, Model);
225 }
226
227 LLVM_DUMP_METHOD void dump() const { print(llvm::errs()); }
228}; // end class Z3Model
229
230/// Get the corresponding IEEE floating-point semantics for a given bitwidth.
231static const llvm::fltSemantics &getFloatSemantics(unsigned BitWidth) {
232 switch (BitWidth) {
233 default:
234 llvm_unreachable("Unsupported floating-point semantics!");
235 break;
236 case 16:
237 return llvm::APFloat::IEEEhalf();
238 case 32:
239 return llvm::APFloat::IEEEsingle();
240 case 64:
241 return llvm::APFloat::IEEEdouble();
242 case 128:
243 return llvm::APFloat::IEEEquad();
244 }
245}
246
247// Determine whether two float semantics are equivalent
248static bool areEquivalent(const llvm::fltSemantics &LHS,
249 const llvm::fltSemantics &RHS) {
250 return (llvm::APFloat::semanticsPrecision(LHS) ==
251 llvm::APFloat::semanticsPrecision(RHS)) &&
252 (llvm::APFloat::semanticsMinExponent(LHS) ==
253 llvm::APFloat::semanticsMinExponent(RHS)) &&
254 (llvm::APFloat::semanticsMaxExponent(LHS) ==
255 llvm::APFloat::semanticsMaxExponent(RHS)) &&
256 (llvm::APFloat::semanticsSizeInBits(LHS) ==
257 llvm::APFloat::semanticsSizeInBits(RHS));
258}
259
260class Z3Solver : public SMTSolver {
261 friend class Z3ConstraintManager;
262
263 Z3Context Context;
264
265 Z3_solver Solver;
266
267 // Cache Sorts
268 std::set<Z3Sort> CachedSorts;
269
270 // Cache Exprs
271 std::set<Z3Expr> CachedExprs;
272
273public:
274 Z3Solver() : Solver(Z3_mk_simple_solver(Context.Context)) {
275 Z3_solver_inc_ref(Context.Context, Solver);
276 }
277
278 Z3Solver(const Z3Solver &Other) = delete;
279 Z3Solver(Z3Solver &&Other) = delete;
280 Z3Solver &operator=(Z3Solver &Other) = delete;
281 Z3Solver &operator=(Z3Solver &&Other) = delete;
282
283 ~Z3Solver() {
284 if (Solver)
285 Z3_solver_dec_ref(Context.Context, Solver);
286 }
287
288 void addConstraint(const SMTExprRef &Exp) const override {
289 Z3_solver_assert(Context.Context, Solver, toZ3Expr(*Exp).AST);
290 }
291
292 // Given an SMTSort, adds/retrieves it from the cache and returns
293 // an SMTSortRef to the SMTSort in the cache.
294 SMTSortRef newSortRef(const SMTSort &Sort) {
295 auto It = CachedSorts.insert(toZ3Sort(Sort));
296 return &(*It.first);
297 }
298
299 // Given an SMTExpr, adds/retrieves it from the cache and returns
300 // an SMTExprRef to the SMTExpr in the cache.
301 SMTExprRef newExprRef(const SMTExpr &Exp) {
302 auto It = CachedExprs.insert(toZ3Expr(Exp));
303 return &(*It.first);
304 }
305
306 SMTSortRef getBoolSort() override {
307 return newSortRef(Z3Sort(Context, Z3_mk_bool_sort(Context.Context)));
308 }
309
310 SMTSortRef getBitvectorSort(unsigned BitWidth) override {
311 return newSortRef(
312 Z3Sort(Context, Z3_mk_bv_sort(Context.Context, BitWidth)));
313 }
314
315 SMTSortRef getSort(const SMTExprRef &Exp) override {
316 return newSortRef(
317 Z3Sort(Context, Z3_get_sort(Context.Context, toZ3Expr(*Exp).AST)));
318 }
319
320 SMTSortRef getFloat16Sort() override {
321 return newSortRef(Z3Sort(Context, Z3_mk_fpa_sort_16(Context.Context)));
322 }
323
324 SMTSortRef getFloat32Sort() override {
325 return newSortRef(Z3Sort(Context, Z3_mk_fpa_sort_32(Context.Context)));
326 }
327
328 SMTSortRef getFloat64Sort() override {
329 return newSortRef(Z3Sort(Context, Z3_mk_fpa_sort_64(Context.Context)));
330 }
331
332 SMTSortRef getFloat128Sort() override {
333 return newSortRef(Z3Sort(Context, Z3_mk_fpa_sort_128(Context.Context)));
334 }
335
336 SMTExprRef mkBVNeg(const SMTExprRef &Exp) override {
337 return newExprRef(
338 Z3Expr(Context, Z3_mk_bvneg(Context.Context, toZ3Expr(*Exp).AST)));
339 }
340
341 SMTExprRef mkBVNot(const SMTExprRef &Exp) override {
342 return newExprRef(
343 Z3Expr(Context, Z3_mk_bvnot(Context.Context, toZ3Expr(*Exp).AST)));
344 }
345
346 SMTExprRef mkNot(const SMTExprRef &Exp) override {
347 return newExprRef(
348 Z3Expr(Context, Z3_mk_not(Context.Context, toZ3Expr(*Exp).AST)));
349 }
350
351 SMTExprRef mkBVAdd(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
352 return newExprRef(
353 Z3Expr(Context, Z3_mk_bvadd(Context.Context, toZ3Expr(*LHS).AST,
354 toZ3Expr(*RHS).AST)));
355 }
356
357 SMTExprRef mkBVSub(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
358 return newExprRef(
359 Z3Expr(Context, Z3_mk_bvsub(Context.Context, toZ3Expr(*LHS).AST,
360 toZ3Expr(*RHS).AST)));
361 }
362
363 SMTExprRef mkBVMul(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
364 return newExprRef(
365 Z3Expr(Context, Z3_mk_bvmul(Context.Context, toZ3Expr(*LHS).AST,
366 toZ3Expr(*RHS).AST)));
367 }
368
369 SMTExprRef mkBVSRem(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
370 return newExprRef(
371 Z3Expr(Context, Z3_mk_bvsrem(Context.Context, toZ3Expr(*LHS).AST,
372 toZ3Expr(*RHS).AST)));
373 }
374
375 SMTExprRef mkBVURem(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
376 return newExprRef(
377 Z3Expr(Context, Z3_mk_bvurem(Context.Context, toZ3Expr(*LHS).AST,
378 toZ3Expr(*RHS).AST)));
379 }
380
381 SMTExprRef mkBVSDiv(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
382 return newExprRef(
383 Z3Expr(Context, Z3_mk_bvsdiv(Context.Context, toZ3Expr(*LHS).AST,
384 toZ3Expr(*RHS).AST)));
385 }
386
387 SMTExprRef mkBVUDiv(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
388 return newExprRef(
389 Z3Expr(Context, Z3_mk_bvudiv(Context.Context, toZ3Expr(*LHS).AST,
390 toZ3Expr(*RHS).AST)));
391 }
392
393 SMTExprRef mkBVShl(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
394 return newExprRef(
395 Z3Expr(Context, Z3_mk_bvshl(Context.Context, toZ3Expr(*LHS).AST,
396 toZ3Expr(*RHS).AST)));
397 }
398
399 SMTExprRef mkBVAshr(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
400 return newExprRef(
401 Z3Expr(Context, Z3_mk_bvashr(Context.Context, toZ3Expr(*LHS).AST,
402 toZ3Expr(*RHS).AST)));
403 }
404
405 SMTExprRef mkBVLshr(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
406 return newExprRef(
407 Z3Expr(Context, Z3_mk_bvlshr(Context.Context, toZ3Expr(*LHS).AST,
408 toZ3Expr(*RHS).AST)));
409 }
410
411 SMTExprRef mkBVXor(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
412 return newExprRef(
413 Z3Expr(Context, Z3_mk_bvxor(Context.Context, toZ3Expr(*LHS).AST,
414 toZ3Expr(*RHS).AST)));
415 }
416
417 SMTExprRef mkBVOr(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
418 return newExprRef(
419 Z3Expr(Context, Z3_mk_bvor(Context.Context, toZ3Expr(*LHS).AST,
420 toZ3Expr(*RHS).AST)));
421 }
422
423 SMTExprRef mkBVAnd(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
424 return newExprRef(
425 Z3Expr(Context, Z3_mk_bvand(Context.Context, toZ3Expr(*LHS).AST,
426 toZ3Expr(*RHS).AST)));
427 }
428
429 SMTExprRef mkBVUlt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
430 return newExprRef(
431 Z3Expr(Context, Z3_mk_bvult(Context.Context, toZ3Expr(*LHS).AST,
432 toZ3Expr(*RHS).AST)));
433 }
434
435 SMTExprRef mkBVSlt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
436 return newExprRef(
437 Z3Expr(Context, Z3_mk_bvslt(Context.Context, toZ3Expr(*LHS).AST,
438 toZ3Expr(*RHS).AST)));
439 }
440
441 SMTExprRef mkBVUgt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
442 return newExprRef(
443 Z3Expr(Context, Z3_mk_bvugt(Context.Context, toZ3Expr(*LHS).AST,
444 toZ3Expr(*RHS).AST)));
445 }
446
447 SMTExprRef mkBVSgt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
448 return newExprRef(
449 Z3Expr(Context, Z3_mk_bvsgt(Context.Context, toZ3Expr(*LHS).AST,
450 toZ3Expr(*RHS).AST)));
451 }
452
453 SMTExprRef mkBVUle(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
454 return newExprRef(
455 Z3Expr(Context, Z3_mk_bvule(Context.Context, toZ3Expr(*LHS).AST,
456 toZ3Expr(*RHS).AST)));
457 }
458
459 SMTExprRef mkBVSle(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
460 return newExprRef(
461 Z3Expr(Context, Z3_mk_bvsle(Context.Context, toZ3Expr(*LHS).AST,
462 toZ3Expr(*RHS).AST)));
463 }
464
465 SMTExprRef mkBVUge(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
466 return newExprRef(
467 Z3Expr(Context, Z3_mk_bvuge(Context.Context, toZ3Expr(*LHS).AST,
468 toZ3Expr(*RHS).AST)));
469 }
470
471 SMTExprRef mkBVSge(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
472 return newExprRef(
473 Z3Expr(Context, Z3_mk_bvsge(Context.Context, toZ3Expr(*LHS).AST,
474 toZ3Expr(*RHS).AST)));
475 }
476
477 SMTExprRef mkAnd(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
478 Z3_ast Args[2] = {toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST};
479 return newExprRef(Z3Expr(Context, Z3_mk_and(Context.Context, 2, Args)));
480 }
481
482 SMTExprRef mkOr(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
483 Z3_ast Args[2] = {toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST};
484 return newExprRef(Z3Expr(Context, Z3_mk_or(Context.Context, 2, Args)));
485 }
486
487 SMTExprRef mkEqual(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
488 return newExprRef(
489 Z3Expr(Context, Z3_mk_eq(Context.Context, toZ3Expr(*LHS).AST,
490 toZ3Expr(*RHS).AST)));
491 }
492
493 SMTExprRef mkFPNeg(const SMTExprRef &Exp) override {
494 return newExprRef(
495 Z3Expr(Context, Z3_mk_fpa_neg(Context.Context, toZ3Expr(*Exp).AST)));
496 }
497
498 SMTExprRef mkFPIsInfinite(const SMTExprRef &Exp) override {
499 return newExprRef(Z3Expr(
500 Context, Z3_mk_fpa_is_infinite(Context.Context, toZ3Expr(*Exp).AST)));
501 }
502
503 SMTExprRef mkFPIsNaN(const SMTExprRef &Exp) override {
504 return newExprRef(
505 Z3Expr(Context, Z3_mk_fpa_is_nan(Context.Context, toZ3Expr(*Exp).AST)));
506 }
507
508 SMTExprRef mkFPIsNormal(const SMTExprRef &Exp) override {
509 return newExprRef(Z3Expr(
510 Context, Z3_mk_fpa_is_normal(Context.Context, toZ3Expr(*Exp).AST)));
511 }
512
513 SMTExprRef mkFPIsZero(const SMTExprRef &Exp) override {
514 return newExprRef(Z3Expr(
515 Context, Z3_mk_fpa_is_zero(Context.Context, toZ3Expr(*Exp).AST)));
516 }
517
518 SMTExprRef mkFPMul(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
519 SMTExprRef RoundingMode = getFloatRoundingMode();
520 return newExprRef(
521 Z3Expr(Context,
522 Z3_mk_fpa_mul(Context.Context, toZ3Expr(*RoundingMode).AST,
523 toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST)));
524 }
525
526 SMTExprRef mkFPDiv(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
527 SMTExprRef RoundingMode = getFloatRoundingMode();
528 return newExprRef(
529 Z3Expr(Context,
530 Z3_mk_fpa_div(Context.Context, toZ3Expr(*RoundingMode).AST,
531 toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST)));
532 }
533
534 SMTExprRef mkFPRem(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
535 return newExprRef(
536 Z3Expr(Context, Z3_mk_fpa_rem(Context.Context, toZ3Expr(*LHS).AST,
537 toZ3Expr(*RHS).AST)));
538 }
539
540 SMTExprRef mkFPAdd(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
541 SMTExprRef RoundingMode = getFloatRoundingMode();
542 return newExprRef(
543 Z3Expr(Context,
544 Z3_mk_fpa_add(Context.Context, toZ3Expr(*RoundingMode).AST,
545 toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST)));
546 }
547
548 SMTExprRef mkFPSub(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
549 SMTExprRef RoundingMode = getFloatRoundingMode();
550 return newExprRef(
551 Z3Expr(Context,
552 Z3_mk_fpa_sub(Context.Context, toZ3Expr(*RoundingMode).AST,
553 toZ3Expr(*LHS).AST, toZ3Expr(*RHS).AST)));
554 }
555
556 SMTExprRef mkFPLt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
557 return newExprRef(
558 Z3Expr(Context, Z3_mk_fpa_lt(Context.Context, toZ3Expr(*LHS).AST,
559 toZ3Expr(*RHS).AST)));
560 }
561
562 SMTExprRef mkFPGt(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
563 return newExprRef(
564 Z3Expr(Context, Z3_mk_fpa_gt(Context.Context, toZ3Expr(*LHS).AST,
565 toZ3Expr(*RHS).AST)));
566 }
567
568 SMTExprRef mkFPLe(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
569 return newExprRef(
570 Z3Expr(Context, Z3_mk_fpa_leq(Context.Context, toZ3Expr(*LHS).AST,
571 toZ3Expr(*RHS).AST)));
572 }
573
574 SMTExprRef mkFPGe(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
575 return newExprRef(
576 Z3Expr(Context, Z3_mk_fpa_geq(Context.Context, toZ3Expr(*LHS).AST,
577 toZ3Expr(*RHS).AST)));
578 }
579
580 SMTExprRef mkFPEqual(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
581 return newExprRef(
582 Z3Expr(Context, Z3_mk_fpa_eq(Context.Context, toZ3Expr(*LHS).AST,
583 toZ3Expr(*RHS).AST)));
584 }
585
586 SMTExprRef mkIte(const SMTExprRef &Cond, const SMTExprRef &T,
587 const SMTExprRef &F) override {
588 return newExprRef(
589 Z3Expr(Context, Z3_mk_ite(Context.Context, toZ3Expr(*Cond).AST,
590 toZ3Expr(*T).AST, toZ3Expr(*F).AST)));
591 }
592
593 SMTExprRef mkBVSignExt(unsigned i, const SMTExprRef &Exp) override {
594 return newExprRef(Z3Expr(
595 Context, Z3_mk_sign_ext(Context.Context, i, toZ3Expr(*Exp).AST)));
596 }
597
598 SMTExprRef mkBVZeroExt(unsigned i, const SMTExprRef &Exp) override {
599 return newExprRef(Z3Expr(
600 Context, Z3_mk_zero_ext(Context.Context, i, toZ3Expr(*Exp).AST)));
601 }
602
603 SMTExprRef mkBVExtract(unsigned High, unsigned Low,
604 const SMTExprRef &Exp) override {
605 return newExprRef(Z3Expr(Context, Z3_mk_extract(Context.Context, High, Low,
606 toZ3Expr(*Exp).AST)));
607 }
608
609 /// Creates a predicate that checks for overflow in a bitvector addition
610 /// operation
611 SMTExprRef mkBVAddNoOverflow(const SMTExprRef &LHS, const SMTExprRef &RHS,
612 bool isSigned) override {
613 return newExprRef(Z3Expr(
614 Context, Z3_mk_bvadd_no_overflow(Context.Context, toZ3Expr(*LHS).AST,
615 toZ3Expr(*RHS).AST, isSigned)));
616 }
617
618 /// Creates a predicate that checks for underflow in a signed bitvector
619 /// addition operation
620 SMTExprRef mkBVAddNoUnderflow(const SMTExprRef &LHS,
621 const SMTExprRef &RHS) override {
622 return newExprRef(Z3Expr(
623 Context, Z3_mk_bvadd_no_underflow(Context.Context, toZ3Expr(*LHS).AST,
624 toZ3Expr(*RHS).AST)));
625 }
626
627 /// Creates a predicate that checks for overflow in a signed bitvector
628 /// subtraction operation
629 SMTExprRef mkBVSubNoOverflow(const SMTExprRef &LHS,
630 const SMTExprRef &RHS) override {
631 return newExprRef(Z3Expr(
632 Context, Z3_mk_bvsub_no_overflow(Context.Context, toZ3Expr(*LHS).AST,
633 toZ3Expr(*RHS).AST)));
634 }
635
636 /// Creates a predicate that checks for underflow in a bitvector subtraction
637 /// operation
638 SMTExprRef mkBVSubNoUnderflow(const SMTExprRef &LHS, const SMTExprRef &RHS,
639 bool isSigned) override {
640 return newExprRef(Z3Expr(
641 Context, Z3_mk_bvsub_no_underflow(Context.Context, toZ3Expr(*LHS).AST,
642 toZ3Expr(*RHS).AST, isSigned)));
643 }
644
645 /// Creates a predicate that checks for overflow in a signed bitvector
646 /// division/modulus operation
647 SMTExprRef mkBVSDivNoOverflow(const SMTExprRef &LHS,
648 const SMTExprRef &RHS) override {
649 return newExprRef(Z3Expr(
650 Context, Z3_mk_bvsdiv_no_overflow(Context.Context, toZ3Expr(*LHS).AST,
651 toZ3Expr(*RHS).AST)));
652 }
653
654 /// Creates a predicate that checks for overflow in a bitvector negation
655 /// operation
656 SMTExprRef mkBVNegNoOverflow(const SMTExprRef &Exp) override {
657 return newExprRef(Z3Expr(
658 Context, Z3_mk_bvneg_no_overflow(Context.Context, toZ3Expr(*Exp).AST)));
659 }
660
661 /// Creates a predicate that checks for overflow in a bitvector multiplication
662 /// operation
663 SMTExprRef mkBVMulNoOverflow(const SMTExprRef &LHS, const SMTExprRef &RHS,
664 bool isSigned) override {
665 return newExprRef(Z3Expr(
666 Context, Z3_mk_bvmul_no_overflow(Context.Context, toZ3Expr(*LHS).AST,
667 toZ3Expr(*RHS).AST, isSigned)));
668 }
669
670 /// Creates a predicate that checks for underflow in a signed bitvector
671 /// multiplication operation
672 SMTExprRef mkBVMulNoUnderflow(const SMTExprRef &LHS,
673 const SMTExprRef &RHS) override {
674 return newExprRef(Z3Expr(
675 Context, Z3_mk_bvmul_no_underflow(Context.Context, toZ3Expr(*LHS).AST,
676 toZ3Expr(*RHS).AST)));
677 }
678
679 SMTExprRef mkBVConcat(const SMTExprRef &LHS, const SMTExprRef &RHS) override {
680 return newExprRef(
681 Z3Expr(Context, Z3_mk_concat(Context.Context, toZ3Expr(*LHS).AST,
682 toZ3Expr(*RHS).AST)));
683 }
684
685 SMTExprRef mkFPtoFP(const SMTExprRef &From, const SMTSortRef &To) override {
686 SMTExprRef RoundingMode = getFloatRoundingMode();
687 return newExprRef(Z3Expr(
688 Context,
689 Z3_mk_fpa_to_fp_float(Context.Context, toZ3Expr(*RoundingMode).AST,
690 toZ3Expr(*From).AST, toZ3Sort(*To).Sort)));
691 }
692
693 SMTExprRef mkSBVtoFP(const SMTExprRef &From, const SMTSortRef &To) override {
694 SMTExprRef RoundingMode = getFloatRoundingMode();
695 return newExprRef(Z3Expr(
696 Context,
697 Z3_mk_fpa_to_fp_signed(Context.Context, toZ3Expr(*RoundingMode).AST,
698 toZ3Expr(*From).AST, toZ3Sort(*To).Sort)));
699 }
700
701 SMTExprRef mkUBVtoFP(const SMTExprRef &From, const SMTSortRef &To) override {
702 SMTExprRef RoundingMode = getFloatRoundingMode();
703 return newExprRef(Z3Expr(
704 Context,
705 Z3_mk_fpa_to_fp_unsigned(Context.Context, toZ3Expr(*RoundingMode).AST,
706 toZ3Expr(*From).AST, toZ3Sort(*To).Sort)));
707 }
708
709 SMTExprRef mkFPtoSBV(const SMTExprRef &From, unsigned ToWidth) override {
710 SMTExprRef RoundingMode = getFloatRoundingMode();
711 return newExprRef(Z3Expr(
712 Context, Z3_mk_fpa_to_sbv(Context.Context, toZ3Expr(*RoundingMode).AST,
713 toZ3Expr(*From).AST, ToWidth)));
714 }
715
716 SMTExprRef mkFPtoUBV(const SMTExprRef &From, unsigned ToWidth) override {
717 SMTExprRef RoundingMode = getFloatRoundingMode();
718 return newExprRef(Z3Expr(
719 Context, Z3_mk_fpa_to_ubv(Context.Context, toZ3Expr(*RoundingMode).AST,
720 toZ3Expr(*From).AST, ToWidth)));
721 }
722
723 SMTExprRef mkBoolean(const bool b) override {
724 return newExprRef(Z3Expr(Context, b ? Z3_mk_true(Context.Context)
725 : Z3_mk_false(Context.Context)));
726 }
727
728 SMTExprRef mkBitvector(const llvm::APSInt Int, unsigned BitWidth) override {
729 const Z3_sort Z3Sort = toZ3Sort(*getBitvectorSort(BitWidth)).Sort;
730
731 // Slow path, when 64 bits are not enough.
732 if (LLVM_UNLIKELY(!Int.isRepresentableByInt64())) {
733 SmallString<40> Buffer;
734 Int.toString(Buffer, 10);
735 return newExprRef(Z3Expr(
736 Context, Z3_mk_numeral(Context.Context, Buffer.c_str(), Z3Sort)));
737 }
738
739 const int64_t BitReprAsSigned = Int.getExtValue();
740 const uint64_t BitReprAsUnsigned =
741 reinterpret_cast<const uint64_t &>(BitReprAsSigned);
742
743 Z3_ast Literal =
744 Int.isSigned()
745 ? Z3_mk_int64(Context.Context, BitReprAsSigned, Z3Sort)
746 : Z3_mk_unsigned_int64(Context.Context, BitReprAsUnsigned, Z3Sort);
747 return newExprRef(Z3Expr(Context, Literal));
748 }
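  // Illustrative examples (editorial addition, not in the upstream file):
  //  * a signed 8-bit APSInt holding -2: getExtValue() sign-extends to the
  //    int64_t -2, so Z3_mk_int64 is used and the value is represented in
  //    two's complement within the 8-bit sort, yielding the bitvector #xfe.
  //  * an unsigned 8-bit APSInt holding 200: getExtValue() zero-extends to
  //    int64_t 200, which is reinterpreted as uint64_t and passed to
  //    Z3_mk_unsigned_int64.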
749
750 SMTExprRef mkFloat(const llvm::APFloat Float) override {
751 SMTSortRef Sort =
752 getFloatSort(llvm::APFloat::semanticsSizeInBits(Float.getSemantics()));
753
754 llvm::APSInt Int = llvm::APSInt(Float.bitcastToAPInt(), false);
755 SMTExprRef Z3Int = mkBitvector(Int, Int.getBitWidth());
756 return newExprRef(Z3Expr(
757 Context, Z3_mk_fpa_to_fp_bv(Context.Context, toZ3Expr(*Z3Int).AST,
758 toZ3Sort(*Sort).Sort)));
759 }
760
761 SMTExprRef mkSymbol(const char *Name, SMTSortRef Sort) override {
762 return newExprRef(
763 Z3Expr(Context, Z3_mk_const(Context.Context,
764 Z3_mk_string_symbol(Context.Context, Name),
765 toZ3Sort(*Sort).Sort)));
766 }
767
768 llvm::APSInt getBitvector(const SMTExprRef &Exp, unsigned BitWidth,
769 bool isUnsigned) override {
770 return llvm::APSInt(
771 llvm::APInt(BitWidth,
772 Z3_get_numeral_string(Context.Context, toZ3Expr(*Exp).AST),
773 10),
774 isUnsigned);
775 }
776
777 bool getBoolean(const SMTExprRef &Exp) override {
778 return Z3_get_bool_value(Context.Context, toZ3Expr(*Exp).AST) == Z3_L_TRUE;
779 }
780
781 SMTExprRef getFloatRoundingMode() override {
782 // TODO: Don't assume nearest ties to even rounding mode
783 return newExprRef(Z3Expr(Context, Z3_mk_fpa_rne(Context.Context)));
784 }
785
786 bool toAPFloat(const SMTSortRef &Sort, const SMTExprRef &AST,
787 llvm::APFloat &Float, bool useSemantics) {
788 assert(Sort->isFloatSort() && "Unsupported sort to floating-point!");
789
790 llvm::APSInt Int(Sort->getFloatSortSize(), true);
791 const llvm::fltSemantics &Semantics =
792 getFloatSemantics(Sort->getFloatSortSize());
793 SMTSortRef BVSort = getBitvectorSort(Sort->getFloatSortSize());
794 if (!toAPSInt(BVSort, AST, Int, true)) {
795 return false;
796 }
797
798 if (useSemantics && !areEquivalent(Float.getSemantics(), Semantics)) {
799 assert(false && "Floating-point types don't match!");
800 return false;
801 }
802
803 Float = llvm::APFloat(Semantics, Int);
804 return true;
805 }
806
807 bool toAPSInt(const SMTSortRef &Sort, const SMTExprRef &AST,
808 llvm::APSInt &Int, bool useSemantics) {
809 if (Sort->isBitvectorSort()) {
810 if (useSemantics && Int.getBitWidth() != Sort->getBitvectorSortSize()) {
811 assert(false && "Bitvector types don't match!");
812 return false;
813 }
814
815 // FIXME: This function is also used to retrieve floating-point values,
816 // which can be 16, 32, 64 or 128 bits long. Bitvectors can be anything
817 // between 1 and 64 bits long, which is the reason we have this weird
818 // guard. In the future, we need proper calls in the backend to retrieve
819 // floating-point values and their special cases (NaN, +/-infinity, +/-zero);
820 // then we can drop this weird condition.
821 if (Sort->getBitvectorSortSize() <= 64 ||
822 Sort->getBitvectorSortSize() == 128) {
823 Int = getBitvector(AST, Int.getBitWidth(), Int.isUnsigned());
824 return true;
825 }
826
827 assert(false && "Bitwidth not supported!");
828 return false;
829 }
830
831 if (Sort->isBooleanSort()) {
832 if (useSemantics && Int.getBitWidth() < 1) {
833 assert(false && "Boolean type doesn't match!");
834 return false;
835 }
836
837 Int = llvm::APSInt(llvm::APInt(Int.getBitWidth(), getBoolean(AST)),
838 Int.isUnsigned());
839 return true;
840 }
841
842 llvm_unreachable("Unsupported sort to integer!");
843 }
844
845 bool getInterpretation(const SMTExprRef &Exp, llvm::APSInt &Int) override {
846 Z3Model Model(Context, Z3_solver_get_model(Context.Context, Solver));
847 Z3_func_decl Func = Z3_get_app_decl(
848 Context.Context, Z3_to_app(Context.Context, toZ3Expr(*Exp).AST));
849 if (Z3_model_has_interp(Context.Context, Model.Model, Func) != Z3_L_TRUE)
850 return false;
851
852 SMTExprRef Assign = newExprRef(
853 Z3Expr(Context,
854 Z3_model_get_const_interp(Context.Context, Model.Model, Func)));
855 SMTSortRef Sort = getSort(Assign);
856 return toAPSInt(Sort, Assign, Int, true);
857 }
858
859 bool getInterpretation(const SMTExprRef &Exp, llvm::APFloat &Float) override {
860 Z3Model Model(Context, Z3_solver_get_model(Context.Context, Solver));
861 Z3_func_decl Func = Z3_get_app_decl(
862 Context.Context, Z3_to_app(Context.Context, toZ3Expr(*Exp).AST));
863 if (Z3_model_has_interp(Context.Context, Model.Model, Func) != Z3_L_TRUE)
864 return false;
865
866 SMTExprRef Assign = newExprRef(
867 Z3Expr(Context,
868 Z3_model_get_const_interp(Context.Context, Model.Model, Func)));
869 SMTSortRef Sort = getSort(Assign);
870 return toAPFloat(Sort, Assign, Float, true);
871 }
872
873 std::optional<bool> check() const override {
874 Z3_lbool res = Z3_solver_check(Context.Context, Solver);
875 if (res == Z3_L_TRUE)
876 return true;
877
878 if (res == Z3_L_FALSE)
879 return false;
880
881 return std::nullopt;
882 }
883
884 void push() override { return Z3_solver_push(Context.Context, Solver); }
885
886 void pop(unsigned NumStates = 1) override {
887 assert(Z3_solver_get_num_scopes(Context.Context, Solver) >= NumStates);
888 return Z3_solver_pop(Context.Context, Solver, NumStates);
889 }
890
891 bool isFPSupported() override { return true; }
892
893 /// Reset the solver and remove all constraints.
894 void reset() override { Z3_solver_reset(Context.Context, Solver); }
895
896 void print(raw_ostream &OS) const override {
897 OS << Z3_solver_to_string(Context.Context, Solver);
898 }
899}; // end class Z3Solver
900
901} // end anonymous namespace
902
903#endif
904
905llvm::SMTSolverRef llvm::CreateZ3Solver() {
906#if LLVM_WITH_Z3
907 return std::make_unique<Z3Solver>();
908#else
909 llvm::report_fatal_error("LLVM was not compiled with Z3 support, rebuild "
910 "with -DLLVM_ENABLE_Z3_SOLVER=ON",
911 false);
912 return nullptr;
913#endif
914}
915
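Below is a minimal usage sketch. It is an editorial addition rather than part of
Z3Solver.cpp, it assumes LLVM was configured with -DLLVM_ENABLE_Z3_SOLVER=ON, and
it relies only on SMTSolver methods implemented above (getBitvectorSort, mkSymbol,
mkBitvector, mkBVUgt, addConstraint, check, getInterpretation).

#include "llvm/ADT/APSInt.h"
#include "llvm/Support/SMTAPI.h"
#include "llvm/Support/raw_ostream.h"
#include <optional>

// Assert x > 42 over an 8-bit unsigned bitvector symbol "x", check
// satisfiability, and print one satisfying assignment if a model exists.
void solveExample() {
  llvm::SMTSolverRef Solver = llvm::CreateZ3Solver();

  llvm::SMTSortRef BV8 = Solver->getBitvectorSort(8);
  llvm::SMTExprRef X = Solver->mkSymbol("x", BV8);
  llvm::SMTExprRef C42 = Solver->mkBitvector(
      llvm::APSInt(llvm::APInt(8, 42), /*isUnsigned=*/true), 8);
  Solver->addConstraint(Solver->mkBVUgt(X, C42));

  // check() returns true (sat), false (unsat), or std::nullopt (unknown, e.g.
  // when the 15s timeout configured in Z3Config is reached).
  std::optional<bool> Sat = Solver->check();
  if (Sat && *Sat) {
    llvm::APSInt Value(8, /*isUnsigned=*/true);
    if (Solver->getInterpretation(X, Value))
      llvm::errs() << "x = " << Value << "\n";
  }
}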