[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82
83 struct target_builtins default_target_builtins;
84 #if SWITCHABLE_TARGET
85 struct target_builtins *this_target_builtins = &default_target_builtins;
86 #endif
87
88 /* Define the names of the builtin function types and codes. */
89 const char *const built_in_class_names[BUILT_IN_LAST]
90 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
91
92 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
93 const char * built_in_names[(int) END_BUILTINS] =
94 {
95 #include "builtins.def"
96 };
97
98 /* Set up an array of builtin_info_type, making sure each element decl is
99 initialized to NULL_TREE. */
100 builtin_info_type builtin_info[(int)END_BUILTINS];
101
102 /* Non-zero if __builtin_constant_p should be folded right away. */
103 bool force_folding_builtin_constant_p;
104
105 static int target_char_cast (tree, char *);
106 static rtx get_memory_rtx (tree, tree);
107 static int apply_args_size (void);
108 static int apply_result_size (void);
109 static rtx result_vector (int, rtx);
110 static void expand_builtin_prefetch (tree);
111 static rtx expand_builtin_apply_args (void);
112 static rtx expand_builtin_apply_args_1 (void);
113 static rtx expand_builtin_apply (rtx, rtx, rtx);
114 static void expand_builtin_return (rtx);
115 static enum type_class type_to_class (tree);
116 static rtx expand_builtin_classify_type (tree);
117 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
118 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
119 static rtx expand_builtin_interclass_mathfn (tree, rtx);
120 static rtx expand_builtin_sincos (tree);
121 static rtx expand_builtin_cexpi (tree, rtx);
122 static rtx expand_builtin_int_roundingfn (tree, rtx);
123 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
124 static rtx expand_builtin_next_arg (void);
125 static rtx expand_builtin_va_start (tree);
126 static rtx expand_builtin_va_end (tree);
127 static rtx expand_builtin_va_copy (tree);
128 static rtx inline_expand_builtin_bytecmp (tree, rtx);
129 static rtx expand_builtin_strcmp (tree, rtx);
130 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
131 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
132 static rtx expand_builtin_memchr (tree, rtx);
133 static rtx expand_builtin_memcpy (tree, rtx);
134 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
135 rtx target, tree exp,
136 memop_ret retmode,
137 bool might_overlap);
138 static rtx expand_builtin_memmove (tree, rtx);
139 static rtx expand_builtin_mempcpy (tree, rtx);
140 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
141 static rtx expand_builtin_strcat (tree);
142 static rtx expand_builtin_strcpy (tree, rtx);
143 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
144 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
145 static rtx expand_builtin_stpncpy (tree, rtx);
146 static rtx expand_builtin_strncat (tree, rtx);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
149 static rtx expand_builtin_memset (tree, rtx, machine_mode);
150 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
151 static rtx expand_builtin_bzero (tree);
152 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
153 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
154 static rtx expand_builtin_alloca (tree);
155 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
156 static rtx expand_builtin_frame_address (tree, tree);
157 static tree stabilize_va_list_loc (location_t, tree, int);
158 static rtx expand_builtin_expect (tree, rtx);
159 static rtx expand_builtin_expect_with_probability (tree, rtx);
160 static tree fold_builtin_constant_p (tree);
161 static tree fold_builtin_classify_type (tree);
162 static tree fold_builtin_strlen (location_t, tree, tree, tree);
163 static tree fold_builtin_inf (location_t, tree, int);
164 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
165 static bool validate_arg (const_tree, enum tree_code code);
166 static rtx expand_builtin_fabs (tree, rtx, rtx);
167 static rtx expand_builtin_signbit (tree, rtx);
168 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_varargs (location_t, tree, tree*, int);
177
178 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179 static tree fold_builtin_strspn (location_t, tree, tree, tree);
180 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
181
182 static rtx expand_builtin_object_size (tree);
183 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 enum built_in_function);
185 static void maybe_emit_chk_warning (tree, enum built_in_function);
186 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187 static tree fold_builtin_object_size (tree, tree);
188 static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
189 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
190 pointer_query *);
191
192 unsigned HOST_WIDE_INT target_newline;
193 unsigned HOST_WIDE_INT target_percent;
194 static unsigned HOST_WIDE_INT target_c;
195 static unsigned HOST_WIDE_INT target_s;
196 char target_percent_c[3];
197 char target_percent_s[3];
198 char target_percent_s_newline[4];
199 static tree do_mpfr_remquo (tree, tree, tree);
200 static tree do_mpfr_lgamma_r (tree, tree, tree);
201 static void expand_builtin_sync_synchronize (void);
202
203 access_ref::access_ref (tree bound /* = NULL_TREE */,
204 bool minaccess /* = false */)
205 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
206 base0 (true), parmarray ()
207 {
208 /* Set to valid. */
209 offrng[0] = offrng[1] = 0;
210 /* Invalidate. */
211 sizrng[0] = sizrng[1] = -1;
212
213 /* Set the default bounds of the access and adjust below. */
214 bndrng[0] = minaccess ? 1 : 0;
215 bndrng[1] = HOST_WIDE_INT_M1U;
216
217 /* When BOUND is nonnull and a range can be extracted from it,
218 set the bounds of the access to reflect both it and MINACCESS.
219 BNDRNG[0] is the size of the minimum access. */
220 tree rng[2];
221 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
222 {
223 bndrng[0] = wi::to_offset (rng[0]);
224 bndrng[1] = wi::to_offset (rng[1]);
225 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
226 }
227 }
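
/* For illustration: with a constant BOUND of 4 and MINACCESS set, the
   size range above stays invalid ([-1, -1]) while BNDRNG becomes [1, 4];
   the upper bound comes from BOUND and MINACCESS raises the lower bound
   to a single-byte access.  */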
228
229 /* Return the PHI node REF refers to or null if it doesn't. */
230
231 gphi *
232 access_ref::phi () const
233 {
234 if (!ref || TREE_CODE (ref) != SSA_NAME)
235 return NULL;
236
237 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
238 if (gimple_code (def_stmt) != GIMPLE_PHI)
239 return NULL;
240
241 return as_a <gphi *> (def_stmt);
242 }
243
244 /* Determine and return the largest object to which *THIS refers. If *THIS
245 refers to a PHI and PREF is nonnull, fill *PREF with the details
246 of the object determined by compute_objsize(ARG, OSTYPE) for each
247 PHI argument ARG. */
248
249 tree
250 access_ref::get_ref (vec<access_ref> *all_refs,
251 access_ref *pref /* = NULL */,
252 int ostype /* = 1 */,
253 ssa_name_limit_t *psnlim /* = NULL */,
254 pointer_query *qry /* = NULL */) const
255 {
256 gphi *phi_stmt = this->phi ();
257 if (!phi_stmt)
258 return ref;
259
260 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
261 cause unbounded recursion. */
262 ssa_name_limit_t snlim_buf;
263 if (!psnlim)
264 psnlim = &snlim_buf;
265
266 if (!psnlim->visit_phi (ref))
267 return NULL_TREE;
268
269 /* Reflects the range of offsets of all PHI arguments that refer to the same
270 object (i.e., have the same REF). */
271 access_ref same_ref;
272 /* The conservative result of the PHI reflecting the offset and size
273 of the largest PHI argument, regardless of whether or not they all
274 refer to the same object. */
275 pointer_query empty_qry;
276 if (!qry)
277 qry = &empty_qry;
278
279 access_ref phi_ref;
280 if (pref)
281 {
282 phi_ref = *pref;
283 same_ref = *pref;
284 }
285
286 /* Set if any argument is a function array (or VLA) parameter not
287 declared [static]. */
288 bool parmarray = false;
289 /* The size of the smallest object referenced by the PHI arguments. */
290 offset_int minsize = 0;
291 const offset_int maxobjsize = wi::to_offset (max_object_size ());
292 /* The offset of the PHI, not reflecting those of its arguments. */
293 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
294
295 const unsigned nargs = gimple_phi_num_args (phi_stmt);
296 for (unsigned i = 0; i < nargs; ++i)
297 {
298 access_ref phi_arg_ref;
299 tree arg = gimple_phi_arg_def (phi_stmt, i);
300 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
301 || phi_arg_ref.sizrng[0] < 0)
302 /* A PHI with all null pointer arguments. */
303 return NULL_TREE;
304
305 /* Add PREF's offset to that of the argument. */
306 phi_arg_ref.add_offset (orng[0], orng[1]);
307 if (TREE_CODE (arg) == SSA_NAME)
308 qry->put_ref (arg, phi_arg_ref);
309
310 if (all_refs)
311 all_refs->safe_push (phi_arg_ref);
312
313 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
314 || phi_arg_ref.sizrng[1] != maxobjsize);
315
316 parmarray |= phi_arg_ref.parmarray;
317
318 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
319
320 if (phi_ref.sizrng[0] < 0)
321 {
322 if (!nullp)
323 same_ref = phi_arg_ref;
324 phi_ref = phi_arg_ref;
325 if (arg_known_size)
326 minsize = phi_arg_ref.sizrng[0];
327 continue;
328 }
329
330 const bool phi_known_size = (phi_ref.sizrng[0] != 0
331 || phi_ref.sizrng[1] != maxobjsize);
332
333 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
334 minsize = phi_arg_ref.sizrng[0];
335
336 /* Disregard null pointers in PHIs with two or more arguments.
337 TODO: Handle this better! */
338 if (nullp)
339 continue;
340
341 /* Determine the amount of remaining space in the argument. */
342 offset_int argrem[2];
343 argrem[1] = phi_arg_ref.size_remaining (argrem);
344
345 /* Determine the amount of remaining space computed so far and
346 if the remaining space in the argument is more, use it instead. */
347 offset_int phirem[2];
348 phirem[1] = phi_ref.size_remaining (phirem);
349
350 if (phi_arg_ref.ref != same_ref.ref)
351 same_ref.ref = NULL_TREE;
352
353 if (phirem[1] < argrem[1]
354 || (phirem[1] == argrem[1]
355 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
356 /* Use the argument with the most space remaining as the result,
357 or the larger one if the space is equal. */
358 phi_ref = phi_arg_ref;
359
360 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
361 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
362 same_ref.offrng[0] = phi_arg_ref.offrng[0];
363 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
364 same_ref.offrng[1] = phi_arg_ref.offrng[1];
365 }
366
367 if (phi_ref.sizrng[0] < 0)
368 {
369 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
370 (perhaps because they have all been already visited by prior
371 recursive calls). */
372 psnlim->leave_phi (ref);
373 return NULL_TREE;
374 }
375
376 if (!same_ref.ref && same_ref.offrng[0] != 0)
377 /* Clear BASE0 if not all the arguments refer to the same object and
378 if not all their offsets are zero-based. This allows the final
379 PHI offset to be out of bounds for some arguments but not for others
380 (or negative even if all the arguments are BASE0), which is overly
381 permissive. */
382 phi_ref.base0 = false;
383
384 if (same_ref.ref)
385 phi_ref = same_ref;
386 else
387 {
388 /* Replace the lower bound of the largest argument with the size
389 of the smallest argument, and set PARMARRAY if any argument
390 was one. */
391 phi_ref.sizrng[0] = minsize;
392 phi_ref.parmarray = parmarray;
393 }
394
395 /* Avoid changing *THIS. */
396 if (pref && pref != this)
397 *pref = phi_ref;
398
399 psnlim->leave_phi (ref);
400
401 return phi_ref.ref;
402 }
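
/* For illustration: given char a[4], b[8] and a pointer defined by
   p = PHI <&a, &b>, the result refers to b (the argument with the most
   space remaining) while its size range is [4, 8]: the lower bound is
   replaced by the size of the smallest argument.  This is an informal
   sketch of the merging above, not an exact trace.  */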
403
404 /* Return the maximum amount of space remaining and, if PMIN is
405 nonnull, set *PMIN to the minimum. */
406
407 offset_int
408 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
409 {
410 offset_int minbuf;
411 if (!pmin)
412 pmin = &minbuf;
413
414 /* add_offset() ensures the offset range isn't inverted. */
415 gcc_checking_assert (offrng[0] <= offrng[1]);
416
417 if (base0)
418 {
419 /* The offset into the referenced object is zero-based (i.e., it's
420 not referenced by a pointer into the middle of some unknown object). */
421 if (offrng[0] < 0 && offrng[1] < 0)
422 {
423 /* If the offset is negative the remaining size is zero. */
424 *pmin = 0;
425 return 0;
426 }
427
428 if (sizrng[1] <= offrng[0])
429 {
430 /* If the starting offset is greater than or equal to the upper
431 bound on the size of the object, the space remaining is zero.
432 As a special case, if it's equal, set *PMIN to -1 to let
433 the caller know the offset is valid and just past the end. */
434 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
435 return 0;
436 }
437
438 /* Otherwise return the size minus the lower bound of the offset. */
439 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
440
441 *pmin = sizrng[0] - or0;
442 return sizrng[1] - or0;
443 }
444
445 /* The offset to the referenced object isn't zero-based (i.e., it may
446 refer to a byte other than the first). The size of such an object
447 is constrained only by the size of the address space (the result
448 of max_object_size()). */
449 if (sizrng[1] <= offrng[0])
450 {
451 *pmin = 0;
452 return 0;
453 }
454
455 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
456
457 *pmin = sizrng[0] - or0;
458 return sizrng[1] - or0;
459 }
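
/* For illustration: for a zero-based (BASE0) reference with
   SIZRNG = [4, 8] and OFFRNG = [2, 2], size_remaining returns 8 - 2 = 6
   and sets *PMIN to 4 - 2 = 2.  With OFFRNG = [8, 9] it returns 0 and
   sets *PMIN to -1, marking an offset just past the end.  */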
460
461 /* Add the range [MIN, MAX] to the offset range. For known objects (with
462 zero-based offsets) at least one of whose offset's bounds is in range,
463 constrain the other (or both) to the bounds of the object (i.e., zero
464 and the upper bound of its size). This improves the quality of
465 diagnostics. */
466
467 void access_ref::add_offset (const offset_int &min, const offset_int &max)
468 {
469 if (min <= max)
470 {
471 /* To add an ordinary range just add it to the bounds. */
472 offrng[0] += min;
473 offrng[1] += max;
474 }
475 else if (!base0)
476 {
477 /* To add an inverted range to an offset to an unknown object
478 expand it to the maximum. */
479 add_max_offset ();
480 return;
481 }
482 else
483 {
484 /* To add an inverted range to an offset to a known object set
485 the upper bound to the maximum representable offset value
486 (which may be greater than MAX_OBJECT_SIZE).
487 The lower bound is either the sum of the current offset and
488 MIN when abs(MAX) is greater than the former, or zero otherwise.
489 Zero because then the inverted range includes the negative of
490 the lower bound. */
491 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
492 offrng[1] = maxoff;
493
494 if (max >= 0)
495 {
496 offrng[0] = 0;
497 return;
498 }
499
500 offset_int absmax = wi::abs (max);
501 if (offrng[0] < absmax)
502 {
503 offrng[0] += min;
504 /* Cap the lower bound at the upper (set to MAXOFF above)
505 to avoid inadvertently recreating an inverted range. */
506 if (offrng[1] < offrng[0])
507 offrng[0] = offrng[1];
508 }
509 else
510 offrng[0] = 0;
511 }
512
513 if (!base0)
514 return;
515
516 /* When referencing a known object check to see if the offset computed
517 so far is in bounds... */
518 offset_int remrng[2];
519 remrng[1] = size_remaining (remrng);
520 if (remrng[1] > 0 || remrng[0] < 0)
521 {
522 /* ...if so, constrain it so that neither bound exceeds the size of
523 the object. Out of bounds offsets are left unchanged, and, for
524 better or worse, become in bounds later. They should be detected
525 and diagnosed at the point they first become invalid by
526 -Warray-bounds. */
527 if (offrng[0] < 0)
528 offrng[0] = 0;
529 if (offrng[1] > sizrng[1])
530 offrng[1] = sizrng[1];
531 }
532 }
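
/* For illustration: for a BASE0 reference with SIZRNG = [8, 8] and
   OFFRNG = [0, 0], add_offset (-3, 12) first widens OFFRNG to [-3, 12]
   and then, because the object is known, clamps it to the object's
   bounds, leaving OFFRNG = [0, 8].  */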
533
534 /* Set a bit for the PHI in VISITED and return true if it wasn't
535 already set. */
536
537 bool
538 ssa_name_limit_t::visit_phi (tree ssa_name)
539 {
540 if (!visited)
541 visited = BITMAP_ALLOC (NULL);
542
543 /* Return false if SSA_NAME has already been visited. */
544 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
545 }
546
547 /* Clear a bit for the PHI in VISITED. */
548
549 void
550 ssa_name_limit_t::leave_phi (tree ssa_name)
551 {
552 /* Clear the bit so SSA_NAME can be visited again. */
553 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
554 }
555
556 /* Return false if the SSA_NAME chain length counter has reached
557 the limit, otherwise decrement the counter and return true. */
558
559 bool
560 ssa_name_limit_t::next ()
561 {
562 /* Return false to let the caller avoid recursing beyond
563 the specified limit. */
564 if (ssa_def_max == 0)
565 return false;
566
567 --ssa_def_max;
568 return true;
569 }
570
571 /* If the SSA_NAME has already been "seen" return a positive value.
572 Otherwise add it to VISITED. If the SSA_NAME limit has been
573 reached, return a negative value. Otherwise return zero. */
574
575 int
576 ssa_name_limit_t::next_phi (tree ssa_name)
577 {
578 {
579 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
580 /* Return a positive value if the PHI has already been visited. */
581 if (gimple_code (def_stmt) == GIMPLE_PHI
582 && !visit_phi (ssa_name))
583 return 1;
584 }
585
586 /* Return a negative value to let caller avoid recursing beyond
587 the specified limit. */
588 if (ssa_def_max == 0)
589 return -1;
590
591 --ssa_def_max;
592
593 return 0;
594 }
595
596 ssa_name_limit_t::~ssa_name_limit_t ()
597 {
598 if (visited)
599 BITMAP_FREE (visited);
600 }
601
602 /* Default ctor. Initialize object with pointers to the range_query
603 and cache_type instances to use or null. */
604
605 pointer_query::pointer_query (range_query *qry /* = NULL */,
606 cache_type *cache /* = NULL */)
607 : rvals (qry), var_cache (cache), hits (), misses (),
608 failures (), depth (), max_depth ()
609 {
610 /* No op. */
611 }
612
613 /* Return a pointer to the cached access_ref instance for the SSA_NAME
614 PTR if it's there or null otherwise. */
615
616 const access_ref *
617 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
618 {
619 if (!var_cache)
620 {
621 ++misses;
622 return NULL;
623 }
624
625 unsigned version = SSA_NAME_VERSION (ptr);
626 unsigned idx = version << 1 | (ostype & 1);
627 if (var_cache->indices.length () <= idx)
628 {
629 ++misses;
630 return NULL;
631 }
632
633 unsigned cache_idx = var_cache->indices[idx];
634 if (var_cache->access_refs.length () <= cache_idx)
635 {
636 ++misses;
637 return NULL;
638 }
639
640 access_ref &cache_ref = var_cache->access_refs[cache_idx];
641 if (cache_ref.ref)
642 {
643 ++hits;
644 return &cache_ref;
645 }
646
647 ++misses;
648 return NULL;
649 }
650
651 /* Retrieve the access_ref instance for a variable from the cache if it's
652 there or compute it and insert it into the cache if it's nonnull. */
653
654 bool
655 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
656 {
657 const unsigned version
658 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
659
660 if (var_cache && version)
661 {
662 unsigned idx = version << 1 | (ostype & 1);
663 if (idx < var_cache->indices.length ())
664 {
665 unsigned cache_idx = var_cache->indices[idx] - 1;
666 if (cache_idx < var_cache->access_refs.length ()
667 && var_cache->access_refs[cache_idx].ref)
668 {
669 ++hits;
670 *pref = var_cache->access_refs[cache_idx];
671 return true;
672 }
673 }
674
675 ++misses;
676 }
677
678 if (!compute_objsize (ptr, ostype, pref, this))
679 {
680 ++failures;
681 return false;
682 }
683
684 return true;
685 }
686
687 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
688 nonnull. */
689
690 void
691 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
692 {
693 /* Only add populated/valid entries. */
694 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
695 return;
696
697 /* Add REF to the two-level cache. */
698 unsigned version = SSA_NAME_VERSION (ptr);
699 unsigned idx = version << 1 | (ostype & 1);
700
701 /* Grow INDICES if necessary. An index is valid if it's nonzero.
702 Its value minus one is the index into ACCESS_REFS. Not all
703 entries are valid. */
704 if (var_cache->indices.length () <= idx)
705 var_cache->indices.safe_grow_cleared (idx + 1);
706
707 if (!var_cache->indices[idx])
708 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
709
710 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
711 REF member is nonnull. All entries except for the last two
712 are valid. Once nonnull, the REF value must stay unchanged. */
713 unsigned cache_idx = var_cache->indices[idx];
714 if (var_cache->access_refs.length () <= cache_idx)
715 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
716
717 access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
718 if (cache_ref.ref)
719 {
720 gcc_checking_assert (cache_ref.ref == ref.ref);
721 return;
722 }
723
724 cache_ref = ref;
725 }
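
/* For illustration: the cache is indexed by SSA version and object size
   type, so for SSA_NAME version V and OSTYPE 1 the slot is
   INDICES[2 * V + 1], whose value minus one is the position of the
   access_ref in ACCESS_REFS.  A zero index means no entry yet.  */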
726
727 /* Flush the cache if it's nonnull. */
728
729 void
730 pointer_query::flush_cache ()
731 {
732 if (!var_cache)
733 return;
734 var_cache->indices.release ();
735 var_cache->access_refs.release ();
736 }
737
738 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
739
740 static bool
741 is_builtin_name (const char *name)
742 {
743 if (strncmp (name, "__builtin_", 10) == 0)
744 return true;
745 if (strncmp (name, "__sync_", 7) == 0)
746 return true;
747 if (strncmp (name, "__atomic_", 9) == 0)
748 return true;
749 return false;
750 }
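
/* For illustration: is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  */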
751
752 /* Return true if NODE should be considered for inline expansion regardless
753 of the optimization level. This is the case whenever a function is invoked with
754 its "internal" name, which normally contains the prefix "__builtin". */
755
756 bool
757 called_as_built_in (tree node)
758 {
759 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
760 we want the name used to call the function, not the name it
761 will have. */
762 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
763 return is_builtin_name (name);
764 }
765
766 /* Compute values M and N such that M divides (address of EXP - N) and such
767 that N < M. If these numbers can be determined, store M in alignp and N in
768 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
769 *alignp and any bit-offset to *bitposp.
770
771 Note that the address (and thus the alignment) computed here is based
772 on the address to which a symbol resolves, whereas DECL_ALIGN is based
773 on the address at which an object is actually located. These two
774 addresses are not always the same. For example, on ARM targets,
775 the address &foo of a Thumb function foo() has the lowest bit set,
776 whereas foo() itself starts on an even address.
777
778 If ADDR_P is true we are taking the address of the memory reference EXP
779 and thus cannot rely on the access taking place. */
780
781 static bool
782 get_object_alignment_2 (tree exp, unsigned int *alignp,
783 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
784 {
785 poly_int64 bitsize, bitpos;
786 tree offset;
787 machine_mode mode;
788 int unsignedp, reversep, volatilep;
789 unsigned int align = BITS_PER_UNIT;
790 bool known_alignment = false;
791
792 /* Get the innermost object and the constant (bitpos) and possibly
793 variable (offset) offset of the access. */
794 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
795 &unsignedp, &reversep, &volatilep);
796
797 /* Extract alignment information from the innermost object and
798 possibly adjust bitpos and offset. */
799 if (TREE_CODE (exp) == FUNCTION_DECL)
800 {
801 /* Function addresses can encode extra information besides their
802 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
803 allows the low bit to be used as a virtual bit, we know
804 that the address itself must be at least 2-byte aligned. */
805 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
806 align = 2 * BITS_PER_UNIT;
807 }
808 else if (TREE_CODE (exp) == LABEL_DECL)
809 ;
810 else if (TREE_CODE (exp) == CONST_DECL)
811 {
812 /* The alignment of a CONST_DECL is determined by its initializer. */
813 exp = DECL_INITIAL (exp);
814 align = TYPE_ALIGN (TREE_TYPE (exp));
815 if (CONSTANT_CLASS_P (exp))
816 align = targetm.constant_alignment (exp, align);
817
818 known_alignment = true;
819 }
820 else if (DECL_P (exp))
821 {
822 align = DECL_ALIGN (exp);
823 known_alignment = true;
824 }
825 else if (TREE_CODE (exp) == INDIRECT_REF
826 || TREE_CODE (exp) == MEM_REF
827 || TREE_CODE (exp) == TARGET_MEM_REF)
828 {
829 tree addr = TREE_OPERAND (exp, 0);
830 unsigned ptr_align;
831 unsigned HOST_WIDE_INT ptr_bitpos;
832 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
833
834 /* If the address is explicitly aligned, handle that. */
835 if (TREE_CODE (addr) == BIT_AND_EXPR
836 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
837 {
838 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
839 ptr_bitmask *= BITS_PER_UNIT;
840 align = least_bit_hwi (ptr_bitmask);
841 addr = TREE_OPERAND (addr, 0);
842 }
843
844 known_alignment
845 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
846 align = MAX (ptr_align, align);
847
848 /* Re-apply explicit alignment to the bitpos. */
849 ptr_bitpos &= ptr_bitmask;
850
851 /* The alignment of the pointer operand in a TARGET_MEM_REF
852 has to take the variable offset parts into account. */
853 if (TREE_CODE (exp) == TARGET_MEM_REF)
854 {
855 if (TMR_INDEX (exp))
856 {
857 unsigned HOST_WIDE_INT step = 1;
858 if (TMR_STEP (exp))
859 step = TREE_INT_CST_LOW (TMR_STEP (exp));
860 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
861 }
862 if (TMR_INDEX2 (exp))
863 align = BITS_PER_UNIT;
864 known_alignment = false;
865 }
866
867 /* When EXP is an actual memory reference then we can use
868 TYPE_ALIGN of a pointer indirection to derive alignment.
869 Do so only if get_pointer_alignment_1 did not reveal absolute
870 alignment knowledge and if using that alignment would
871 improve the situation. */
872 unsigned int talign;
873 if (!addr_p && !known_alignment
874 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
875 && talign > align)
876 align = talign;
877 else
878 {
879 /* Else adjust bitpos accordingly. */
880 bitpos += ptr_bitpos;
881 if (TREE_CODE (exp) == MEM_REF
882 || TREE_CODE (exp) == TARGET_MEM_REF)
883 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
884 }
885 }
886 else if (TREE_CODE (exp) == STRING_CST)
887 {
888 /* STRING_CSTs are the only constant objects we allow to be not
889 wrapped inside a CONST_DECL. */
890 align = TYPE_ALIGN (TREE_TYPE (exp));
891 if (CONSTANT_CLASS_P (exp))
892 align = targetm.constant_alignment (exp, align);
893
894 known_alignment = true;
895 }
896
897 /* If there is a non-constant offset part extract the maximum
898 alignment that can prevail. */
899 if (offset)
900 {
901 unsigned int trailing_zeros = tree_ctz (offset);
902 if (trailing_zeros < HOST_BITS_PER_INT)
903 {
904 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
905 if (inner)
906 align = MIN (align, inner);
907 }
908 }
909
910 /* Account for the alignment of runtime coefficients, so that the constant
911 bitpos is guaranteed to be accurate. */
912 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
913 if (alt_align != 0 && alt_align < align)
914 {
915 align = alt_align;
916 known_alignment = false;
917 }
918
919 *alignp = align;
920 *bitposp = bitpos.coeffs[0] & (align - 1);
921 return known_alignment;
922 }
923
924 /* For a memory reference expression EXP compute values M and N such that M
925 divides (&EXP - N) and such that N < M. If these numbers can be determined,
926 store M in alignp and N in *BITPOSP and return true. Otherwise return false
927 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
928
929 bool
930 get_object_alignment_1 (tree exp, unsigned int *alignp,
931 unsigned HOST_WIDE_INT *bitposp)
932 {
933 return get_object_alignment_2 (exp, alignp, bitposp, false);
934 }
935
936 /* Return the alignment in bits of EXP, an object. */
937
938 unsigned int
939 get_object_alignment (tree exp)
940 {
941 unsigned HOST_WIDE_INT bitpos = 0;
942 unsigned int align;
943
944 get_object_alignment_1 (exp, &align, &bitpos);
945
946 /* align and bitpos now specify known low bits of the pointer.
947 ptr & (align - 1) == bitpos. */
948
949 if (bitpos != 0)
950 align = least_bit_hwi (bitpos);
951 return align;
952 }
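
/* For illustration: if get_object_alignment_1 determines ALIGN = 64 and
   BITPOS = 16 (i.e. the address is 16 bits past a 64-bit boundary), the
   guaranteed alignment of the object itself is only least_bit_hwi (16)
   = 16 bits, which is what get_object_alignment returns.  */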
953
954 /* For a pointer valued expression EXP compute values M and N such that M
955 divides (EXP - N) and such that N < M. If these numbers can be determined,
956 store M in alignp and N in *BITPOSP and return true. Return false if
957 the results are just a conservative approximation.
958
959 If EXP is not a pointer, false is returned too. */
960
961 bool
962 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
963 unsigned HOST_WIDE_INT *bitposp)
964 {
965 STRIP_NOPS (exp);
966
967 if (TREE_CODE (exp) == ADDR_EXPR)
968 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
969 alignp, bitposp, true);
970 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
971 {
972 unsigned int align;
973 unsigned HOST_WIDE_INT bitpos;
974 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
975 &align, &bitpos);
976 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
977 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
978 else
979 {
980 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
981 if (trailing_zeros < HOST_BITS_PER_INT)
982 {
983 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
984 if (inner)
985 align = MIN (align, inner);
986 }
987 }
988 *alignp = align;
989 *bitposp = bitpos & (align - 1);
990 return res;
991 }
992 else if (TREE_CODE (exp) == SSA_NAME
993 && POINTER_TYPE_P (TREE_TYPE (exp)))
994 {
995 unsigned int ptr_align, ptr_misalign;
996 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
997
998 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
999 {
1000 *bitposp = ptr_misalign * BITS_PER_UNIT;
1001 *alignp = ptr_align * BITS_PER_UNIT;
1002 /* Make sure to return a sensible alignment when the multiplication
1003 by BITS_PER_UNIT overflowed. */
1004 if (*alignp == 0)
1005 *alignp = 1u << (HOST_BITS_PER_INT - 1);
1006 /* We cannot really tell whether this result is an approximation. */
1007 return false;
1008 }
1009 else
1010 {
1011 *bitposp = 0;
1012 *alignp = BITS_PER_UNIT;
1013 return false;
1014 }
1015 }
1016 else if (TREE_CODE (exp) == INTEGER_CST)
1017 {
1018 *alignp = BIGGEST_ALIGNMENT;
1019 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
1020 & (BIGGEST_ALIGNMENT - 1));
1021 return true;
1022 }
1023
1024 *bitposp = 0;
1025 *alignp = BITS_PER_UNIT;
1026 return false;
1027 }
1028
1029 /* Return the alignment in bits of EXP, a pointer valued expression.
1030 The alignment returned is, by default, the alignment of the thing that
1031 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
1032
1033 Otherwise, look at the expression to see if we can do better, i.e., if the
1034 expression is actually pointing at an object whose alignment is tighter. */
1035
1036 unsigned int
1037 get_pointer_alignment (tree exp)
1038 {
1039 unsigned HOST_WIDE_INT bitpos = 0;
1040 unsigned int align;
1041
1042 get_pointer_alignment_1 (exp, &align, &bitpos);
1043
1044 /* align and bitpos now specify known low bits of the pointer.
1045 ptr & (align - 1) == bitpos. */
1046
1047 if (bitpos != 0)
1048 align = least_bit_hwi (bitpos);
1049
1050 return align;
1051 }
1052
1053 /* Return the number of leading non-zero elements in the sequence
1054 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
1055 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
1056
1057 unsigned
1058 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
1059 {
1060 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1061
1062 unsigned n;
1063
1064 if (eltsize == 1)
1065 {
1066 /* Optimize the common case of plain char. */
1067 for (n = 0; n < maxelts; n++)
1068 {
1069 const char *elt = (const char*) ptr + n;
1070 if (!*elt)
1071 break;
1072 }
1073 }
1074 else
1075 {
1076 for (n = 0; n < maxelts; n++)
1077 {
1078 const char *elt = (const char*) ptr + n * eltsize;
1079 if (!memcmp (elt, "\0\0\0\0", eltsize))
1080 break;
1081 }
1082 }
1083 return n;
1084 }
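
/* For illustration: string_length ("ab\0cd", 1, 5) returns 2, and for a
   wide string whose 4-byte elements are { 'a', 'b', 0, ... },
   string_length (ptr, 4, maxelts) also returns 2 as long as
   MAXELTS >= 2.  */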
1085
1086 /* For a call EXPR at LOC to a function FNAME that expects a string
1087 in the argument ARG, issue a diagnostic due to it being called
1088 with an argument that is a character array with no terminating
1089 NUL. SIZE is the EXACT size of the array, and BNDRNG the number
1090 of characters in which the NUL is expected. Either EXPR or FNAME
1091 may be null but not both. SIZE may be null when BNDRNG is null. */
1092
1093 void
1094 warn_string_no_nul (location_t loc, tree expr, const char *fname,
1095 tree arg, tree decl, tree size /* = NULL_TREE */,
1096 bool exact /* = false */,
1097 const wide_int bndrng[2] /* = NULL */)
1098 {
1099 if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
1100 return;
1101
1102 loc = expansion_point_location_if_in_system_header (loc);
1103 bool warned;
1104
1105 /* Format the bound range as a string to keep the number of messages
1106 from exploding. */
1107 char bndstr[80];
1108 *bndstr = 0;
1109 if (bndrng)
1110 {
1111 if (bndrng[0] == bndrng[1])
1112 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
1113 else
1114 sprintf (bndstr, "[%llu, %llu]",
1115 (unsigned long long) bndrng[0].to_uhwi (),
1116 (unsigned long long) bndrng[1].to_uhwi ());
1117 }
1118
1119 const tree maxobjsize = max_object_size ();
1120 const wide_int maxsiz = wi::to_wide (maxobjsize);
1121 if (expr)
1122 {
1123 tree func = get_callee_fndecl (expr);
1124 if (bndrng)
1125 {
1126 if (wi::ltu_p (maxsiz, bndrng[0]))
1127 warned = warning_at (loc, OPT_Wstringop_overread,
1128 "%K%qD specified bound %s exceeds "
1129 "maximum object size %E",
1130 expr, func, bndstr, maxobjsize);
1131 else
1132 {
1133 bool maybe = wi::to_wide (size) == bndrng[0];
1134 warned = warning_at (loc, OPT_Wstringop_overread,
1135 exact
1136 ? G_("%K%qD specified bound %s exceeds "
1137 "the size %E of unterminated array")
1138 : (maybe
1139 ? G_("%K%qD specified bound %s may "
1140 "exceed the size of at most %E "
1141 "of unterminated array")
1142 : G_("%K%qD specified bound %s exceeds "
1143 "the size of at most %E "
1144 "of unterminated array")),
1145 expr, func, bndstr, size);
1146 }
1147 }
1148 else
1149 warned = warning_at (loc, OPT_Wstringop_overread,
1150 "%K%qD argument missing terminating nul",
1151 expr, func);
1152 }
1153 else
1154 {
1155 if (bndrng)
1156 {
1157 if (wi::ltu_p (maxsiz, bndrng[0]))
1158 warned = warning_at (loc, OPT_Wstringop_overread,
1159 "%qs specified bound %s exceeds "
1160 "maximum object size %E",
1161 fname, bndstr, maxobjsize);
1162 else
1163 {
1164 bool maybe = wi::to_wide (size) == bndrng[0];
1165 warned = warning_at (loc, OPT_Wstringop_overread,
1166 exact
1167 ? G_("%qs specified bound %s exceeds "
1168 "the size %E of unterminated array")
1169 : (maybe
1170 ? G_("%qs specified bound %s may "
1171 "exceed the size of at most %E "
1172 "of unterminated array")
1173 : G_("%qs specified bound %s exceeds "
1174 "the size of at most %E "
1175 "of unterminated array")),
1176 fname, bndstr, size);
1177 }
1178 }
1179 else
1180 warned = warning_at (loc, OPT_Wstringop_overread,
1181 "%qsargument missing terminating nul",
1182 fname);
1183 }
1184
1185 if (warned)
1186 {
1187 inform (DECL_SOURCE_LOCATION (decl),
1188 "referenced argument declared here");
1189 TREE_NO_WARNING (arg) = 1;
1190 if (expr)
1191 TREE_NO_WARNING (expr) = 1;
1192 }
1193 }
1194
1195 /* For a call EXPR (which may be null) that expects a string argument
1196 SRC, return false if SRC is a character array with no terminating
1197 NUL and the bound (when known) does not limit the access to within
1198 its size. When nonnull, BOUND is the number of characters in which
1199 to expect the terminating NUL. When EXPR is nonnull a warning is
1200 also issued. */
1201
1202 bool
1203 check_nul_terminated_array (tree expr, tree src,
1204 tree bound /* = NULL_TREE */)
1205 {
1206 /* The constant size of the array SRC points to. The actual size
1207 may be less unless EXACT is true, but not more. */
1208 tree size;
1209 /* True if SIZE is exact, i.e., SRC involves no non-constant offset into the array. */
1210 bool exact;
1211 /* The unterminated constant array SRC points to. */
1212 tree nonstr = unterminated_array (src, &size, &exact);
1213 if (!nonstr)
1214 return true;
1215
1216 /* NONSTR refers to the non-nul terminated constant array and SIZE
1217 is the constant size of the array in bytes. EXACT is true when
1218 SIZE is exact. */
1219
1220 wide_int bndrng[2];
1221 if (bound)
1222 {
1223 if (TREE_CODE (bound) == INTEGER_CST)
1224 bndrng[0] = bndrng[1] = wi::to_wide (bound);
1225 else
1226 {
1227 value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
1228 if (rng != VR_RANGE)
1229 return true;
1230 }
1231
1232 if (exact)
1233 {
1234 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
1235 return true;
1236 }
1237 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
1238 return true;
1239 }
1240
1241 if (expr)
1242 warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
1243 size, exact, bound ? bndrng : NULL);
1244
1245 return false;
1246 }
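
/* For illustration: for const char a[3] = "abc" (no terminating NUL,
   known exact size 3), a call like strnlen (a, 4) makes
   check_nul_terminated_array warn and return false, whereas a bound of
   3 or less keeps the access within the array and it returns true.  */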
1247
1248 /* If EXP refers to an unterminated constant character array return
1249 the declaration of the object of which the array is a member or
1250 element and if SIZE is not null, set *SIZE to the size of
1251 the unterminated array and set *EXACT if the size is exact or
1252 clear it otherwise. Otherwise return null. */
1253
1254 tree
1255 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
1256 {
1257 /* C_STRLEN will return NULL and set DECL in the info
1258 structure if EXP references an unterminated array. */
1259 c_strlen_data lendata = { };
1260 tree len = c_strlen (exp, 1, &lendata);
1261 if (len == NULL_TREE && lendata.minlen && lendata.decl)
1262 {
1263 if (size)
1264 {
1265 len = lendata.minlen;
1266 if (lendata.off)
1267 {
1268 /* Constant offsets are already accounted for in LENDATA.MINLEN,
1269 but not in a SSA_NAME + CST expression. */
1270 if (TREE_CODE (lendata.off) == INTEGER_CST)
1271 *exact = true;
1272 else if (TREE_CODE (lendata.off) == PLUS_EXPR
1273 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
1274 {
1275 /* Subtract the offset from the size of the array. */
1276 *exact = false;
1277 tree temp = TREE_OPERAND (lendata.off, 1);
1278 temp = fold_convert (ssizetype, temp);
1279 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
1280 }
1281 else
1282 *exact = false;
1283 }
1284 else
1285 *exact = true;
1286
1287 *size = len;
1288 }
1289 return lendata.decl;
1290 }
1291
1292 return NULL_TREE;
1293 }
1294
1295 /* Compute the length of a null-terminated character string or wide
1296 character string handling character sizes of 1, 2, and 4 bytes.
1297 TREE_STRING_LENGTH is not the right way because it evaluates to
1298 the size of the character array in bytes (as opposed to characters)
1299 and because it can contain a zero byte in the middle.
1300
1301 ONLY_VALUE should be nonzero if the result is not going to be emitted
1302 into the instruction stream and zero if it is going to be expanded.
1303 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
1304 is returned, otherwise NULL, since
1305 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
1306 evaluate the side-effects.
1307
1308 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
1309 accesses. Note that this implies the result is not going to be emitted
1310 into the instruction stream.
1311
1312 Additional information about the string accessed may be recorded
1313 in DATA. For example, if ARG references an unterminated string,
1314 then the declaration will be stored in the DECL field. If the
1315 length of the unterminated string can be determined, it'll be
1316 stored in the LEN field. Note this length could well be different
1317 than what a C strlen call would return.
1318
1319 ELTSIZE is 1 for normal single byte character strings, and 2 or
1320 4 for wide character strings. ELTSIZE is by default 1.
1321
1322 The value returned is of type `ssizetype'. */
1323
1324 tree
1325 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
1326 {
1327 /* If we were not passed a DATA pointer, then get one to a local
1328 structure. That avoids having to check DATA for NULL before
1329 each time we want to use it. */
1330 c_strlen_data local_strlen_data = { };
1331 if (!data)
1332 data = &local_strlen_data;
1333
1334 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1335
1336 tree src = STRIP_NOPS (arg);
1337 if (TREE_CODE (src) == COND_EXPR
1338 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1339 {
1340 tree len1, len2;
1341
1342 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1343 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
1344 if (tree_int_cst_equal (len1, len2))
1345 return len1;
1346 }
1347
1348 if (TREE_CODE (src) == COMPOUND_EXPR
1349 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1350 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1351
1352 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
1353
1354 /* Offset from the beginning of the string in bytes. */
1355 tree byteoff;
1356 tree memsize;
1357 tree decl;
1358 src = string_constant (src, &byteoff, &memsize, &decl);
1359 if (src == 0)
1360 return NULL_TREE;
1361
1362 /* Determine the size of the string element. */
1363 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
1364 return NULL_TREE;
1365
1366 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
1367 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
1368 in case the latter is less than the size of the array, such as when
1369 SRC refers to a short string literal used to initialize a large array.
1370 In that case, the elements of the array after the terminating NUL are
1371 all NUL. */
1372 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
1373 strelts = strelts / eltsize;
1374
1375 if (!tree_fits_uhwi_p (memsize))
1376 return NULL_TREE;
1377
1378 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1379
1380 /* PTR can point to the byte representation of any string type, including
1381 char* and wchar_t*. */
1382 const char *ptr = TREE_STRING_POINTER (src);
1383
1384 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
1385 {
1386 /* The code below works only for single byte character types. */
1387 if (eltsize != 1)
1388 return NULL_TREE;
1389
1390 /* If the string has an internal NUL character followed by any
1391 non-NUL characters (e.g., "foo\0bar"), we can't compute
1392 the offset to the following NUL if we don't know where to
1393 start searching for it. */
1394 unsigned len = string_length (ptr, eltsize, strelts);
1395
1396 /* Return when an embedded null character is found or none at all.
1397 In the latter case, set the DECL/LEN field in the DATA structure
1398 so that callers may examine them. */
1399 if (len + 1 < strelts)
1400 return NULL_TREE;
1401 else if (len >= maxelts)
1402 {
1403 data->decl = decl;
1404 data->off = byteoff;
1405 data->minlen = ssize_int (len);
1406 return NULL_TREE;
1407 }
1408
1409 /* For empty strings the result should be zero. */
1410 if (len == 0)
1411 return ssize_int (0);
1412
1413 /* We don't know the starting offset, but we do know that the string
1414 has no internal zero bytes. If the offset falls within the bounds
1415 of the string subtract the offset from the length of the string,
1416 and return that. Otherwise the length is zero. Take care to
1417 use SAVE_EXPR in case the OFFSET has side-effects. */
1418 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
1419 : byteoff;
1420 offsave = fold_convert_loc (loc, sizetype, offsave);
1421 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
1422 size_int (len));
1423 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
1424 offsave);
1425 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
1426 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
1427 build_zero_cst (ssizetype));
1428 }
1429
1430 /* Offset from the beginning of the string in elements. */
1431 HOST_WIDE_INT eltoff;
1432
1433 /* We have a known offset into the string. Start searching there for
1434 a null character if we can represent it as a single HOST_WIDE_INT. */
1435 if (byteoff == 0)
1436 eltoff = 0;
1437 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1438 eltoff = -1;
1439 else
1440 eltoff = tree_to_uhwi (byteoff) / eltsize;
1441
1442 /* If the offset is known to be out of bounds, warn, and call strlen at
1443 runtime. */
1444 if (eltoff < 0 || eltoff >= maxelts)
1445 {
1446 /* Suppress multiple warnings for propagated constant strings. */
1447 if (only_value != 2
1448 && !TREE_NO_WARNING (arg)
1449 && warning_at (loc, OPT_Warray_bounds,
1450 "offset %qwi outside bounds of constant string",
1451 eltoff))
1452 {
1453 if (decl)
1454 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
1455 TREE_NO_WARNING (arg) = 1;
1456 }
1457 return NULL_TREE;
1458 }
1459
1460 /* If eltoff is larger than strelts but less than maxelts the
1461 string length is zero, since the excess memory will be zero. */
1462 if (eltoff > strelts)
1463 return ssize_int (0);
1464
1465 /* Use strlen to search for the first zero byte. Since any strings
1466 constructed with build_string will have nulls appended, we win even
1467 if we get handed something like (char[4])"abcd".
1468
1469 Since ELTOFF is our starting index into the string, no further
1470 calculation is needed. */
1471 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
1472 strelts - eltoff);
1473
1474 /* Don't know what to return if there was no zero termination.
1475 Ideally this would turn into a gcc_checking_assert over time.
1476 Set DECL/LEN so callers can examine them. */
1477 if (len >= maxelts - eltoff)
1478 {
1479 data->decl = decl;
1480 data->off = byteoff;
1481 data->minlen = ssize_int (len);
1482 return NULL_TREE;
1483 }
1484
1485 return ssize_int (len);
1486 }
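
/* For illustration: c_strlen on a reference to the string literal
   "hello" yields ssize_int (5); on "hello" + 2 (a constant byte offset
   of 2) it yields ssize_int (3); and on a conditional like
   i++ ? "foo" : "bar" with ONLY_VALUE nonzero it yields ssize_int (3)
   because both arms have the same length.  */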
1487
1488 /* Return a constant integer corresponding to target reading
1489 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1490 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1491 are assumed to be zero, otherwise it reads as many characters
1492 as needed. */
1493
1494 rtx
1495 c_readstr (const char *str, scalar_int_mode mode,
1496 bool null_terminated_p/*=true*/)
1497 {
1498 HOST_WIDE_INT ch;
1499 unsigned int i, j;
1500 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
1501
1502 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
1503 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
1504 / HOST_BITS_PER_WIDE_INT;
1505
1506 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
1507 for (i = 0; i < len; i++)
1508 tmp[i] = 0;
1509
1510 ch = 1;
1511 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1512 {
1513 j = i;
1514 if (WORDS_BIG_ENDIAN)
1515 j = GET_MODE_SIZE (mode) - i - 1;
1516 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
1517 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
1518 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
1519 j *= BITS_PER_UNIT;
1520
1521 if (ch || !null_terminated_p)
1522 ch = (unsigned char) str[i];
1523 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
1524 }
1525
1526 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1527 return immed_wide_int_const (c, mode);
1528 }
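
/* For illustration: on a typical little-endian target, c_readstr ("ab",
   SImode) produces the constant 0x00006261: the bytes 'a' (0x61) and
   'b' (0x62) occupy the low-order positions and the remaining bytes are
   zero because of the implied terminating NUL.  */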
1529
1530 /* Cast a target constant CST to target CHAR and if that value fits into
1531 host char type, return zero and put that value into the variable
1532 pointed to by P; otherwise return one. */
1533
1534 static int
1535 target_char_cast (tree cst, char *p)
1536 {
1537 unsigned HOST_WIDE_INT val, hostval;
1538
1539 if (TREE_CODE (cst) != INTEGER_CST
1540 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1541 return 1;
1542
1543 /* Do not care if it fits or not right here. */
1544 val = TREE_INT_CST_LOW (cst);
1545
1546 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
1547 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
1548
1549 hostval = val;
1550 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
1551 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
1552
1553 if (val != hostval)
1554 return 1;
1555
1556 *p = hostval;
1557 return 0;
1558 }
1559
1560 /* Similar to save_expr, but assumes that arbitrary code is not executed
1561 in between the multiple evaluations. In particular, we assume that a
1562 non-addressable local variable will not be modified. */
1563
1564 static tree
1565 builtin_save_expr (tree exp)
1566 {
1567 if (TREE_CODE (exp) == SSA_NAME
1568 || (TREE_ADDRESSABLE (exp) == 0
1569 && (TREE_CODE (exp) == PARM_DECL
1570 || (VAR_P (exp) && !TREE_STATIC (exp)))))
1571 return exp;
1572
1573 return save_expr (exp);
1574 }
1575
1576 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1577 times to get the address of either a higher stack frame, or a return
1578 address located within it (depending on FNDECL_CODE). */
1579
1580 static rtx
1581 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1582 {
1583 int i;
1584 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
1585 if (tem == NULL_RTX)
1586 {
1587 /* For a zero count with __builtin_return_address, we don't care what
1588 frame address we return, because target-specific definitions will
1589 override us. Therefore frame pointer elimination is OK, and using
1590 the soft frame pointer is OK.
1591
1592 For a nonzero count, or a zero count with __builtin_frame_address,
1593 we require a stable offset from the current frame pointer to the
1594 previous one, so we must use the hard frame pointer, and
1595 we must disable frame pointer elimination. */
1596 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1597 tem = frame_pointer_rtx;
1598 else
1599 {
1600 tem = hard_frame_pointer_rtx;
1601
1602 /* Tell reload not to eliminate the frame pointer. */
1603 crtl->accesses_prior_frames = 1;
1604 }
1605 }
1606
1607 if (count > 0)
1608 SETUP_FRAME_ADDRESSES ();
1609
1610 /* On the SPARC, the return address is not in the frame, it is in a
1611 register. There is no way to access it off of the current frame
1612 pointer, but it can be accessed off the previous frame pointer by
1613 reading the value from the register window save area. */
1614 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1615 count--;
1616
1617 /* Scan back COUNT frames to the specified frame. */
1618 for (i = 0; i < count; i++)
1619 {
1620 /* Assume the dynamic chain pointer is in the word that the
1621 frame address points to, unless otherwise specified. */
1622 tem = DYNAMIC_CHAIN_ADDRESS (tem);
1623 tem = memory_address (Pmode, tem);
1624 tem = gen_frame_mem (Pmode, tem);
1625 tem = copy_to_reg (tem);
1626 }
1627
1628 /* For __builtin_frame_address, return what we've got. But, on
1629 the SPARC for example, we may have to add a bias. */
1630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1631 return FRAME_ADDR_RTX (tem);
1632
1633 /* For __builtin_return_address, get the return address from that frame. */
1634 #ifdef RETURN_ADDR_RTX
1635 tem = RETURN_ADDR_RTX (count, tem);
1636 #else
1637 tem = memory_address (Pmode,
1638 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1639 tem = gen_frame_mem (Pmode, tem);
1640 #endif
1641 return tem;
1642 }
1643
1644 /* Alias set used for setjmp buffer. */
1645 static alias_set_type setjmp_alias_set = -1;
1646
1647 /* Construct the leading half of a __builtin_setjmp call. Control will
1648 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1649 exception handling code. */
1650
1651 void
1652 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1653 {
1654 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1655 rtx stack_save;
1656 rtx mem;
1657
1658 if (setjmp_alias_set == -1)
1659 setjmp_alias_set = new_alias_set ();
1660
1661 buf_addr = convert_memory_address (Pmode, buf_addr);
1662
1663 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1664
1665 /* We store the frame pointer and the address of receiver_label in
1666 the buffer and use the rest of it for the stack save area, which
1667 is machine-dependent. */
1668
1669 mem = gen_rtx_MEM (Pmode, buf_addr);
1670 set_mem_alias_set (mem, setjmp_alias_set);
1671 emit_move_insn (mem, hard_frame_pointer_rtx);
1672
1673 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1674 GET_MODE_SIZE (Pmode))),
1675 set_mem_alias_set (mem, setjmp_alias_set);
1676
1677 emit_move_insn (validize_mem (mem),
1678 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1679
1680 stack_save = gen_rtx_MEM (sa_mode,
1681 plus_constant (Pmode, buf_addr,
1682 2 * GET_MODE_SIZE (Pmode)));
1683 set_mem_alias_set (stack_save, setjmp_alias_set);
1684 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1685
1686 /* If there is further processing to do, do it. */
1687 if (targetm.have_builtin_setjmp_setup ())
1688 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1689
1690 /* We have a nonlocal label. */
1691 cfun->has_nonlocal_label = 1;
1692 }
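
/* For illustration, the jmp_buf layout set up above is, in words of
   Pmode size: word 0 holds the frame pointer, word 1 the address of
   RECEIVER_LABEL, and the remaining words starting at offset
   2 * GET_MODE_SIZE (Pmode) hold the machine-dependent stack save
   area.  */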
1693
1694 /* Construct the trailing part of a __builtin_setjmp call. This is
1695 also called directly by the SJLJ exception handling code.
1696 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1697
1698 void
1699 expand_builtin_setjmp_receiver (rtx receiver_label)
1700 {
1701 rtx chain;
1702
1703   /* The frame pointer is needed when we get here, so make sure it is
1704      marked as used by this function.  */
1705 emit_use (hard_frame_pointer_rtx);
1706
1707   /* Mark the static chain as clobbered here so its liveness
1708      information does not get messed up.  */
1709 chain = rtx_for_static_chain (current_function_decl, true);
1710 if (chain && REG_P (chain))
1711 emit_clobber (chain);
1712
1713 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1714 {
1715 /* If the argument pointer can be eliminated in favor of the
1716 frame pointer, we don't need to restore it. We assume here
1717 that if such an elimination is present, it can always be used.
1718 This is the case on all known machines; if we don't make this
1719 assumption, we do unnecessary saving on many machines. */
1720 size_t i;
1721 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1722
1723 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1724 if (elim_regs[i].from == ARG_POINTER_REGNUM
1725 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1726 break;
1727
1728 if (i == ARRAY_SIZE (elim_regs))
1729 {
1730 /* Now restore our arg pointer from the address at which it
1731 was saved in our stack frame. */
1732 emit_move_insn (crtl->args.internal_arg_pointer,
1733 copy_to_reg (get_arg_pointer_save_area ()));
1734 }
1735 }
1736
1737 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1738 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1739 else if (targetm.have_nonlocal_goto_receiver ())
1740 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1741 else
1742 { /* Nothing */ }
1743
1744 /* We must not allow the code we just generated to be reordered by
1745 scheduling. Specifically, the update of the frame pointer must
1746 happen immediately, not later. */
1747 emit_insn (gen_blockage ());
1748 }
1749
1750 /* __builtin_longjmp is passed a pointer to an array of five words (not
1751 all will be used on all machines). It operates similarly to the C
1752 library function of the same name, but is more efficient. Much of
1753 the code below is copied from the handling of non-local gotos. */
1754
1755 static void
1756 expand_builtin_longjmp (rtx buf_addr, rtx value)
1757 {
1758 rtx fp, lab, stack;
1759 rtx_insn *insn, *last;
1760 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1761
1762   /* DRAP is needed for stack realignment if longjmp is expanded in the
1763      current function.  */
1764 if (SUPPORTS_STACK_ALIGNMENT)
1765 crtl->need_drap = true;
1766
1767 if (setjmp_alias_set == -1)
1768 setjmp_alias_set = new_alias_set ();
1769
1770 buf_addr = convert_memory_address (Pmode, buf_addr);
1771
1772 buf_addr = force_reg (Pmode, buf_addr);
1773
1774   /* We require that the user pass a second argument of 1, because
1775      that is what builtin_setjmp will return.  */
1776 gcc_assert (value == const1_rtx);
1777
1778 last = get_last_insn ();
1779 if (targetm.have_builtin_longjmp ())
1780 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1781 else
1782 {
1783 fp = gen_rtx_MEM (Pmode, buf_addr);
1784 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1785 GET_MODE_SIZE (Pmode)));
1786
1787 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1788 2 * GET_MODE_SIZE (Pmode)));
1789 set_mem_alias_set (fp, setjmp_alias_set);
1790 set_mem_alias_set (lab, setjmp_alias_set);
1791 set_mem_alias_set (stack, setjmp_alias_set);
1792
1793 /* Pick up FP, label, and SP from the block and jump. This code is
1794 from expand_goto in stmt.c; see there for detailed comments. */
1795 if (targetm.have_nonlocal_goto ())
1796 /* We have to pass a value to the nonlocal_goto pattern that will
1797 get copied into the static_chain pointer, but it does not matter
1798 what that value is, because builtin_setjmp does not use it. */
1799 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1800 else
1801 {
1802 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1803 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1804
1805 lab = copy_to_reg (lab);
1806
1807 /* Restore the frame pointer and stack pointer. We must use a
1808 temporary since the setjmp buffer may be a local. */
1809 fp = copy_to_reg (fp);
1810 emit_stack_restore (SAVE_NONLOCAL, stack);
1811
1812 /* Ensure the frame pointer move is not optimized. */
1813 emit_insn (gen_blockage ());
1814 emit_clobber (hard_frame_pointer_rtx);
1815 emit_clobber (frame_pointer_rtx);
1816 emit_move_insn (hard_frame_pointer_rtx, fp);
1817
1818 emit_use (hard_frame_pointer_rtx);
1819 emit_use (stack_pointer_rtx);
1820 emit_indirect_jump (lab);
1821 }
1822 }
1823
1824 /* Search backwards and mark the jump insn as a non-local goto.
1825 Note that this precludes the use of __builtin_longjmp to a
1826 __builtin_setjmp target in the same function. However, we've
1827 already cautioned the user that these functions are for
1828 internal exception handling use only. */
1829 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1830 {
1831 gcc_assert (insn != last);
1832
1833 if (JUMP_P (insn))
1834 {
1835 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1836 break;
1837 }
1838 else if (CALL_P (insn))
1839 break;
1840 }
1841 }
1842
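/* Return true if the const call argument iterator ITER has not yet
   reached the end of the argument list.  */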
1843 static inline bool
1844 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1845 {
1846 return (iter->i < iter->n);
1847 }
1848
1849 /* This function validates the types of a function call argument list
1850 against a specified list of tree_codes. If the last specifier is a 0,
1851 that represents an ellipsis, otherwise the last specifier must be a
1852 VOID_TYPE. */
1853
1854 static bool
1855 validate_arglist (const_tree callexpr, ...)
1856 {
1857 enum tree_code code;
1858 bool res = 0;
1859 va_list ap;
1860 const_call_expr_arg_iterator iter;
1861 const_tree arg;
1862
1863 va_start (ap, callexpr);
1864 init_const_call_expr_arg_iterator (callexpr, &iter);
1865
1866 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1867 tree fn = CALL_EXPR_FN (callexpr);
1868 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1869
1870 for (unsigned argno = 1; ; ++argno)
1871 {
1872 code = (enum tree_code) va_arg (ap, int);
1873
1874 switch (code)
1875 {
1876 case 0:
1877 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1878 res = true;
1879 goto end;
1880 case VOID_TYPE:
1881 	  /* This signifies an endlink; if no arguments remain, return
1882 	     true, otherwise return false.  */
1883 res = !more_const_call_expr_args_p (&iter);
1884 goto end;
1885 case POINTER_TYPE:
1886 /* The actual argument must be nonnull when either the whole
1887 called function has been declared nonnull, or when the formal
1888 argument corresponding to the actual argument has been. */
1889 if (argmap
1890 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1891 {
1892 arg = next_const_call_expr_arg (&iter);
1893 if (!validate_arg (arg, code) || integer_zerop (arg))
1894 goto end;
1895 break;
1896 }
1897 /* FALLTHRU */
1898 default:
1899 /* If no parameters remain or the parameter's code does not
1900 match the specified code, return false. Otherwise continue
1901 checking any remaining arguments. */
1902 arg = next_const_call_expr_arg (&iter);
1903 if (!validate_arg (arg, code))
1904 goto end;
1905 break;
1906 }
1907 }
1908
1909  /* We need gotos here so that there is a single exit point where
1910     va_end is called.  */
1911 end: ;
1912 va_end (ap);
1913
1914 BITMAP_FREE (argmap);
1915
1916 return res;
1917 }
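
/* For illustration only: a caller such as

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly two arguments, a pointer followed by an integer,
   while the trailing 0 in

     validate_arglist (exp, POINTER_TYPE, 0)

   acts as the ellipsis described above and accepts any further
   arguments after the pointer.  */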
1918
1919 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1920 and the address of the save area. */
1921
1922 static rtx
1923 expand_builtin_nonlocal_goto (tree exp)
1924 {
1925 tree t_label, t_save_area;
1926 rtx r_label, r_save_area, r_fp, r_sp;
1927 rtx_insn *insn;
1928
1929 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1930 return NULL_RTX;
1931
1932 t_label = CALL_EXPR_ARG (exp, 0);
1933 t_save_area = CALL_EXPR_ARG (exp, 1);
1934
1935 r_label = expand_normal (t_label);
1936 r_label = convert_memory_address (Pmode, r_label);
1937 r_save_area = expand_normal (t_save_area);
1938 r_save_area = convert_memory_address (Pmode, r_save_area);
1939 /* Copy the address of the save location to a register just in case it was
1940 based on the frame pointer. */
1941 r_save_area = copy_to_reg (r_save_area);
1942 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1943 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1944 plus_constant (Pmode, r_save_area,
1945 GET_MODE_SIZE (Pmode)));
1946
1947 crtl->has_nonlocal_goto = 1;
1948
1949 /* ??? We no longer need to pass the static chain value, afaik. */
1950 if (targetm.have_nonlocal_goto ())
1951 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1952 else
1953 {
1954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1956
1957 r_label = copy_to_reg (r_label);
1958
1959 /* Restore the frame pointer and stack pointer. We must use a
1960 temporary since the setjmp buffer may be a local. */
1961 r_fp = copy_to_reg (r_fp);
1962 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1963
1964 /* Ensure the frame pointer move is not optimized. */
1965 emit_insn (gen_blockage ());
1966 emit_clobber (hard_frame_pointer_rtx);
1967 emit_clobber (frame_pointer_rtx);
1968 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1969
1970 /* USE of hard_frame_pointer_rtx added for consistency;
1971 not clear if really needed. */
1972 emit_use (hard_frame_pointer_rtx);
1973 emit_use (stack_pointer_rtx);
1974
1975 /* If the architecture is using a GP register, we must
1976 conservatively assume that the target function makes use of it.
1977 The prologue of functions with nonlocal gotos must therefore
1978 initialize the GP register to the appropriate value, and we
1979 must then make sure that this value is live at the point
1980 of the jump. (Note that this doesn't necessarily apply
1981 to targets with a nonlocal_goto pattern; they are free
1982 to implement it in their own way. Note also that this is
1983 a no-op if the GP register is a global invariant.) */
1984 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1985 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1986 emit_use (pic_offset_table_rtx);
1987
1988 emit_indirect_jump (r_label);
1989 }
1990
1991 /* Search backwards to the jump insn and mark it as a
1992 non-local goto. */
1993 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1994 {
1995 if (JUMP_P (insn))
1996 {
1997 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1998 break;
1999 }
2000 else if (CALL_P (insn))
2001 break;
2002 }
2003
2004 return const0_rtx;
2005 }
2006
2007 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2008 (not all will be used on all machines) that was passed to __builtin_setjmp.
2009 It updates the stack pointer in that block to the current value. This is
2010 also called directly by the SJLJ exception handling code. */
2011
2012 void
2013 expand_builtin_update_setjmp_buf (rtx buf_addr)
2014 {
2015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2016 buf_addr = convert_memory_address (Pmode, buf_addr);
2017 rtx stack_save
2018 = gen_rtx_MEM (sa_mode,
2019 memory_address
2020 (sa_mode,
2021 plus_constant (Pmode, buf_addr,
2022 2 * GET_MODE_SIZE (Pmode))));
2023
2024 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2025 }
2026
2027 /* Expand a call to __builtin_prefetch. For a target that does not support
2028 data prefetch, evaluate the memory address argument in case it has side
2029 effects. */
2030
2031 static void
2032 expand_builtin_prefetch (tree exp)
2033 {
2034 tree arg0, arg1, arg2;
2035 int nargs;
2036 rtx op0, op1, op2;
2037
2038 if (!validate_arglist (exp, POINTER_TYPE, 0))
2039 return;
2040
2041 arg0 = CALL_EXPR_ARG (exp, 0);
2042
2043 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2044 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2045 locality). */
2046 nargs = call_expr_nargs (exp);
2047 if (nargs > 1)
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2049 else
2050 arg1 = integer_zero_node;
2051 if (nargs > 2)
2052 arg2 = CALL_EXPR_ARG (exp, 2);
2053 else
2054 arg2 = integer_three_node;
2055
2056 /* Argument 0 is an address. */
2057 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2058
2059 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2060 if (TREE_CODE (arg1) != INTEGER_CST)
2061 {
2062 error ("second argument to %<__builtin_prefetch%> must be a constant");
2063 arg1 = integer_zero_node;
2064 }
2065 op1 = expand_normal (arg1);
2066 /* Argument 1 must be either zero or one. */
2067 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2068 {
2069 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2070 " using zero");
2071 op1 = const0_rtx;
2072 }
2073
2074 /* Argument 2 (locality) must be a compile-time constant int. */
2075 if (TREE_CODE (arg2) != INTEGER_CST)
2076 {
2077 error ("third argument to %<__builtin_prefetch%> must be a constant");
2078 arg2 = integer_zero_node;
2079 }
2080 op2 = expand_normal (arg2);
2081 /* Argument 2 must be 0, 1, 2, or 3. */
2082 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2083 {
2084 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2085 op2 = const0_rtx;
2086 }
2087
2088 if (targetm.have_prefetch ())
2089 {
2090 class expand_operand ops[3];
2091
2092 create_address_operand (&ops[0], op0);
2093 create_integer_operand (&ops[1], INTVAL (op1));
2094 create_integer_operand (&ops[2], INTVAL (op2));
2095 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2096 return;
2097 }
2098
2099 /* Don't do anything with direct references to volatile memory, but
2100 generate code to handle other side effects. */
2101 if (!MEM_P (op0) && side_effects_p (op0))
2102 emit_insn (op0);
2103 }
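
/* From the user's point of view (illustration only), a call such as

     __builtin_prefetch (&a[i + 8], 0, 3);

   reaches this expander with a read hint (0) and maximal temporal
   locality (3); both constants are validated above before the target
   prefetch pattern is emitted.  */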
2104
2105 /* Get a MEM rtx for expression EXP which is the address of an operand
2106    to be used in a string instruction (cmpstrsi, cpymemsi, ...).  LEN is
2107 the maximum length of the block of memory that might be accessed or
2108 NULL if unknown. */
2109
2110 static rtx
2111 get_memory_rtx (tree exp, tree len)
2112 {
2113 tree orig_exp = exp;
2114 rtx addr, mem;
2115
2116   /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
2117      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
2118 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2119 exp = TREE_OPERAND (exp, 0);
2120
2121 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2122 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2123
2124 /* Get an expression we can use to find the attributes to assign to MEM.
2125 First remove any nops. */
2126 while (CONVERT_EXPR_P (exp)
2127 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2128 exp = TREE_OPERAND (exp, 0);
2129
2130   /* Build a MEM_REF representing the whole accessed area as a byte blob
2131      (as builtin stringops may alias with anything).  */
2132 exp = fold_build2 (MEM_REF,
2133 build_array_type (char_type_node,
2134 build_range_type (sizetype,
2135 size_one_node, len)),
2136 exp, build_int_cst (ptr_type_node, 0));
2137
2138 /* If the MEM_REF has no acceptable address, try to get the base object
2139 from the original address we got, and build an all-aliasing
2140 unknown-sized access to that one. */
2141 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2142 set_mem_attributes (mem, exp, 0);
2143 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2144 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2145 0))))
2146 {
2147 exp = build_fold_addr_expr (exp);
2148 exp = fold_build2 (MEM_REF,
2149 build_array_type (char_type_node,
2150 build_range_type (sizetype,
2151 size_zero_node,
2152 NULL)),
2153 exp, build_int_cst (ptr_type_node, 0));
2154 set_mem_attributes (mem, exp, 0);
2155 }
2156 set_mem_alias_set (mem, 0);
2157 return mem;
2158 }
2159 \f
2160 /* Built-in functions to perform an untyped call and return. */
2161
2162 #define apply_args_mode \
2163 (this_target_builtins->x_apply_args_mode)
2164 #define apply_result_mode \
2165 (this_target_builtins->x_apply_result_mode)
2166
2167 /* Return the size required for the block returned by __builtin_apply_args,
2168 and initialize apply_args_mode. */
2169
2170 static int
2171 apply_args_size (void)
2172 {
2173 static int size = -1;
2174 int align;
2175 unsigned int regno;
2176
2177 /* The values computed by this function never change. */
2178 if (size < 0)
2179 {
2180 /* The first value is the incoming arg-pointer. */
2181 size = GET_MODE_SIZE (Pmode);
2182
2183 /* The second value is the structure value address unless this is
2184 passed as an "invisible" first argument. */
2185 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2186 size += GET_MODE_SIZE (Pmode);
2187
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 if (FUNCTION_ARG_REGNO_P (regno))
2190 {
2191 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2192
2193 gcc_assert (mode != VOIDmode);
2194
2195 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2196 if (size % align != 0)
2197 size = CEIL (size, align) * align;
2198 size += GET_MODE_SIZE (mode);
2199 apply_args_mode[regno] = mode;
2200 }
2201 else
2202 {
2203 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2204 }
2205 }
2206 return size;
2207 }
2208
2209 /* Return the size required for the block returned by __builtin_apply,
2210 and initialize apply_result_mode. */
2211
2212 static int
2213 apply_result_size (void)
2214 {
2215 static int size = -1;
2216 int align, regno;
2217
2218 /* The values computed by this function never change. */
2219 if (size < 0)
2220 {
2221 size = 0;
2222
2223 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2224 if (targetm.calls.function_value_regno_p (regno))
2225 {
2226 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2227
2228 gcc_assert (mode != VOIDmode);
2229
2230 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2231 if (size % align != 0)
2232 size = CEIL (size, align) * align;
2233 size += GET_MODE_SIZE (mode);
2234 apply_result_mode[regno] = mode;
2235 }
2236 else
2237 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2238
2239 /* Allow targets that use untyped_call and untyped_return to override
2240 the size so that machine-specific information can be stored here. */
2241 #ifdef APPLY_RESULT_SIZE
2242 size = APPLY_RESULT_SIZE;
2243 #endif
2244 }
2245 return size;
2246 }
2247
2248 /* Create a vector describing the result block RESULT. If SAVEP is true,
2249 the result block is used to save the values; otherwise it is used to
2250 restore the values. */
2251
2252 static rtx
2253 result_vector (int savep, rtx result)
2254 {
2255 int regno, size, align, nelts;
2256 fixed_size_mode mode;
2257 rtx reg, mem;
2258 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2259
2260 size = nelts = 0;
2261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2262 if ((mode = apply_result_mode[regno]) != VOIDmode)
2263 {
2264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2265 if (size % align != 0)
2266 size = CEIL (size, align) * align;
2267 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2268 mem = adjust_address (result, mode, size);
2269 savevec[nelts++] = (savep
2270 ? gen_rtx_SET (mem, reg)
2271 : gen_rtx_SET (reg, mem));
2272 size += GET_MODE_SIZE (mode);
2273 }
2274 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2275 }
2276
2277 /* Save the state required to perform an untyped call with the same
2278 arguments as were passed to the current function. */
2279
2280 static rtx
2281 expand_builtin_apply_args_1 (void)
2282 {
2283 rtx registers, tem;
2284 int size, align, regno;
2285 fixed_size_mode mode;
2286 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2287
2288 /* Create a block where the arg-pointer, structure value address,
2289 and argument registers can be saved. */
2290 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2291
2292 /* Walk past the arg-pointer and structure value address. */
2293 size = GET_MODE_SIZE (Pmode);
2294 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2295 size += GET_MODE_SIZE (Pmode);
2296
2297 /* Save each register used in calling a function to the block. */
2298 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2299 if ((mode = apply_args_mode[regno]) != VOIDmode)
2300 {
2301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2302 if (size % align != 0)
2303 size = CEIL (size, align) * align;
2304
2305 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2306
2307 emit_move_insn (adjust_address (registers, mode, size), tem);
2308 size += GET_MODE_SIZE (mode);
2309 }
2310
2311 /* Save the arg pointer to the block. */
2312 tem = copy_to_reg (crtl->args.internal_arg_pointer);
2313   /* We need the pointer as the caller actually passed it to us, not
2314      as we might have pretended it was passed.  Make sure it's a valid
2315 operand, as emit_move_insn isn't expected to handle a PLUS. */
2316 if (STACK_GROWS_DOWNWARD)
2317 tem
2318 = force_operand (plus_constant (Pmode, tem,
2319 crtl->args.pretend_args_size),
2320 NULL_RTX);
2321 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2322
2323 size = GET_MODE_SIZE (Pmode);
2324
2325 /* Save the structure value address unless this is passed as an
2326 "invisible" first argument. */
2327 if (struct_incoming_value)
2328 emit_move_insn (adjust_address (registers, Pmode, size),
2329 copy_to_reg (struct_incoming_value));
2330
2331 /* Return the address of the block. */
2332 return copy_addr_to_reg (XEXP (registers, 0));
2333 }
2334
2335 /* __builtin_apply_args returns a block of memory allocated on
2336    the stack into which are stored the arg pointer, structure
2337 value address, static chain, and all the registers that might
2338 possibly be used in performing a function call. The code is
2339 moved to the start of the function so the incoming values are
2340 saved. */
2341
2342 static rtx
2343 expand_builtin_apply_args (void)
2344 {
2345 /* Don't do __builtin_apply_args more than once in a function.
2346 Save the result of the first call and reuse it. */
2347 if (apply_args_value != 0)
2348 return apply_args_value;
2349 {
2350 /* When this function is called, it means that registers must be
2351 saved on entry to this function. So we migrate the
2352 call to the first insn of this function. */
2353 rtx temp;
2354
2355 start_sequence ();
2356 temp = expand_builtin_apply_args_1 ();
2357 rtx_insn *seq = get_insns ();
2358 end_sequence ();
2359
2360 apply_args_value = temp;
2361
2362 /* Put the insns after the NOTE that starts the function.
2363 If this is inside a start_sequence, make the outer-level insn
2364 chain current, so the code is placed at the start of the
2365 function. If internal_arg_pointer is a non-virtual pseudo,
2366        it needs to be placed after the insns that initialize
2367        that pseudo.  */
2368 push_topmost_sequence ();
2369 if (REG_P (crtl->args.internal_arg_pointer)
2370 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2371 emit_insn_before (seq, parm_birth_insn);
2372 else
2373 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2374 pop_topmost_sequence ();
2375 return temp;
2376 }
2377 }
2378
2379 /* Perform an untyped call and save the state required to perform an
2380 untyped return of whatever value was returned by the given function. */
2381
2382 static rtx
2383 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2384 {
2385 int size, align, regno;
2386 fixed_size_mode mode;
2387 rtx incoming_args, result, reg, dest, src;
2388 rtx_call_insn *call_insn;
2389 rtx old_stack_level = 0;
2390 rtx call_fusage = 0;
2391 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2392
2393 arguments = convert_memory_address (Pmode, arguments);
2394
2395 /* Create a block where the return registers can be saved. */
2396 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2397
2398 /* Fetch the arg pointer from the ARGUMENTS block. */
2399 incoming_args = gen_reg_rtx (Pmode);
2400 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2401 if (!STACK_GROWS_DOWNWARD)
2402 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2403 incoming_args, 0, OPTAB_LIB_WIDEN);
2404
2405 /* Push a new argument block and copy the arguments. Do not allow
2406 the (potential) memcpy call below to interfere with our stack
2407 manipulations. */
2408 do_pending_stack_adjust ();
2409 NO_DEFER_POP;
2410
2411 /* Save the stack with nonlocal if available. */
2412 if (targetm.have_save_stack_nonlocal ())
2413 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2414 else
2415 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2416
2417 /* Allocate a block of memory onto the stack and copy the memory
2418 arguments to the outgoing arguments address. We can pass TRUE
2419      as the last argument because we just saved the stack pointer
2420 and will restore it right after the call. */
2421 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2422
2423 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2424 may have already set current_function_calls_alloca to true.
2425 current_function_calls_alloca won't be set if argsize is zero,
2426 so we have to guarantee need_drap is true here. */
2427 if (SUPPORTS_STACK_ALIGNMENT)
2428 crtl->need_drap = true;
2429
2430 dest = virtual_outgoing_args_rtx;
2431 if (!STACK_GROWS_DOWNWARD)
2432 {
2433 if (CONST_INT_P (argsize))
2434 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2435 else
2436 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2437 }
2438 dest = gen_rtx_MEM (BLKmode, dest);
2439 set_mem_align (dest, PARM_BOUNDARY);
2440 src = gen_rtx_MEM (BLKmode, incoming_args);
2441 set_mem_align (src, PARM_BOUNDARY);
2442 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2443
2444 /* Refer to the argument block. */
2445 apply_args_size ();
2446 arguments = gen_rtx_MEM (BLKmode, arguments);
2447 set_mem_align (arguments, PARM_BOUNDARY);
2448
2449 /* Walk past the arg-pointer and structure value address. */
2450 size = GET_MODE_SIZE (Pmode);
2451 if (struct_value)
2452 size += GET_MODE_SIZE (Pmode);
2453
2454 /* Restore each of the registers previously saved. Make USE insns
2455 for each of these registers for use in making the call. */
2456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2457 if ((mode = apply_args_mode[regno]) != VOIDmode)
2458 {
2459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2460 if (size % align != 0)
2461 size = CEIL (size, align) * align;
2462 reg = gen_rtx_REG (mode, regno);
2463 emit_move_insn (reg, adjust_address (arguments, mode, size));
2464 use_reg (&call_fusage, reg);
2465 size += GET_MODE_SIZE (mode);
2466 }
2467
2468 /* Restore the structure value address unless this is passed as an
2469 "invisible" first argument. */
2470 size = GET_MODE_SIZE (Pmode);
2471 if (struct_value)
2472 {
2473 rtx value = gen_reg_rtx (Pmode);
2474 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2475 emit_move_insn (struct_value, value);
2476 if (REG_P (struct_value))
2477 use_reg (&call_fusage, struct_value);
2478 }
2479
2480 /* All arguments and registers used for the call are set up by now! */
2481 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2482
2483   /* Ensure the address is valid.  A SYMBOL_REF is already valid, and we
2484      don't want to load it into a register as an optimization, because
2485      prepare_call_address has already done so if that was appropriate.  */
2486 if (GET_CODE (function) != SYMBOL_REF)
2487 function = memory_address (FUNCTION_MODE, function);
2488
2489 /* Generate the actual call instruction and save the return value. */
2490 if (targetm.have_untyped_call ())
2491 {
2492 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2493 emit_call_insn (targetm.gen_untyped_call (mem, result,
2494 result_vector (1, result)));
2495 }
2496 else if (targetm.have_call_value ())
2497 {
2498 rtx valreg = 0;
2499
2500 /* Locate the unique return register. It is not possible to
2501 express a call that sets more than one return register using
2502 call_value; use untyped_call for that. In fact, untyped_call
2503 only needs to save the return registers in the given block. */
2504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2505 if ((mode = apply_result_mode[regno]) != VOIDmode)
2506 {
2507 gcc_assert (!valreg); /* have_untyped_call required. */
2508
2509 valreg = gen_rtx_REG (mode, regno);
2510 }
2511
2512 emit_insn (targetm.gen_call_value (valreg,
2513 gen_rtx_MEM (FUNCTION_MODE, function),
2514 const0_rtx, NULL_RTX, const0_rtx));
2515
2516 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2517 }
2518 else
2519 gcc_unreachable ();
2520
2521 /* Find the CALL insn we just emitted, and attach the register usage
2522 information. */
2523 call_insn = last_call_insn ();
2524 add_function_usage_to (call_insn, call_fusage);
2525
2526 /* Restore the stack. */
2527 if (targetm.have_save_stack_nonlocal ())
2528 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2529 else
2530 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2531 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2532
2533 OK_DEFER_POP;
2534
2535 /* Return the address of the result block. */
2536 result = copy_addr_to_reg (XEXP (result, 0));
2537 return convert_memory_address (ptr_mode, result);
2538 }
2539
2540 /* Perform an untyped return. */
2541
2542 static void
2543 expand_builtin_return (rtx result)
2544 {
2545 int size, align, regno;
2546 fixed_size_mode mode;
2547 rtx reg;
2548 rtx_insn *call_fusage = 0;
2549
2550 result = convert_memory_address (Pmode, result);
2551
2552 apply_result_size ();
2553 result = gen_rtx_MEM (BLKmode, result);
2554
2555 if (targetm.have_untyped_return ())
2556 {
2557 rtx vector = result_vector (0, result);
2558 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2559 emit_barrier ();
2560 return;
2561 }
2562
2563 /* Restore the return value and note that each value is used. */
2564 size = 0;
2565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2566 if ((mode = apply_result_mode[regno]) != VOIDmode)
2567 {
2568 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2569 if (size % align != 0)
2570 size = CEIL (size, align) * align;
2571 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2572 emit_move_insn (reg, adjust_address (result, mode, size));
2573
2574 push_to_sequence (call_fusage);
2575 emit_use (reg);
2576 call_fusage = get_insns ();
2577 end_sequence ();
2578 size += GET_MODE_SIZE (mode);
2579 }
2580
2581 /* Put the USE insns before the return. */
2582 emit_insn (call_fusage);
2583
2584   /* Return whatever values were restored by jumping directly to the end
2585 of the function. */
2586 expand_naked_return ();
2587 }
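
/* Taken together, the builtins expanded above allow a "forwarding"
   wrapper of roughly this shape (illustration only; target_fn is a
   placeholder and 64 an arbitrary bound on the pushed argument size):

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);
*/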
2588
2589 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2590
2591 static enum type_class
2592 type_to_class (tree type)
2593 {
2594 switch (TREE_CODE (type))
2595 {
2596 case VOID_TYPE: return void_type_class;
2597 case INTEGER_TYPE: return integer_type_class;
2598 case ENUMERAL_TYPE: return enumeral_type_class;
2599 case BOOLEAN_TYPE: return boolean_type_class;
2600 case POINTER_TYPE: return pointer_type_class;
2601 case REFERENCE_TYPE: return reference_type_class;
2602 case OFFSET_TYPE: return offset_type_class;
2603 case REAL_TYPE: return real_type_class;
2604 case COMPLEX_TYPE: return complex_type_class;
2605 case FUNCTION_TYPE: return function_type_class;
2606 case METHOD_TYPE: return method_type_class;
2607 case RECORD_TYPE: return record_type_class;
2608 case UNION_TYPE:
2609 case QUAL_UNION_TYPE: return union_type_class;
2610 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2611 ? string_type_class : array_type_class);
2612 case LANG_TYPE: return lang_type_class;
2613 case OPAQUE_TYPE: return opaque_type_class;
2614 default: return no_type_class;
2615 }
2616 }
2617
2618 /* Expand a call EXP to __builtin_classify_type. */
2619
2620 static rtx
2621 expand_builtin_classify_type (tree exp)
2622 {
2623 if (call_expr_nargs (exp))
2624 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2625 return GEN_INT (no_type_class);
2626 }
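
/* For instance, __builtin_classify_type (3.14) expands directly to the
   integer constant real_type_class; only the type of the argument
   matters here, not its value.  */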
2627
2628 /* This helper macro, meant to be used in mathfn_built_in below, determines
2629 which among a set of builtin math functions is appropriate for a given type
2630    mode.  The `F' (float) and `L' (long double) variants are automatically
2631    generated from the `double' case.  If a function supports the _Float<N>
2632    and _Float<N>X types, there are additional variants considered with
2633    `F32', `F64', `F128', etc. suffixes.  */
2634 #define CASE_MATHFN(MATHFN) \
2635 CASE_CFN_##MATHFN: \
2636 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2637 fcodel = BUILT_IN_##MATHFN##L ; break;
2638 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2639 types. */
2640 #define CASE_MATHFN_FLOATN(MATHFN) \
2641 CASE_CFN_##MATHFN: \
2642 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2643 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2644 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2645 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2646 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2647 break;
2648 /* Similar to above, but appends _R after any F/L suffix. */
2649 #define CASE_MATHFN_REENT(MATHFN) \
2650 case CFN_BUILT_IN_##MATHFN##_R: \
2651 case CFN_BUILT_IN_##MATHFN##F_R: \
2652 case CFN_BUILT_IN_##MATHFN##L_R: \
2653 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2654 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2655
2656 /* Return a function equivalent to FN but operating on floating-point
2657 values of type TYPE, or END_BUILTINS if no such function exists.
2658 This is purely an operation on function codes; it does not guarantee
2659 that the target actually has an implementation of the function. */
2660
2661 static built_in_function
2662 mathfn_built_in_2 (tree type, combined_fn fn)
2663 {
2664 tree mtype;
2665 built_in_function fcode, fcodef, fcodel;
2666 built_in_function fcodef16 = END_BUILTINS;
2667 built_in_function fcodef32 = END_BUILTINS;
2668 built_in_function fcodef64 = END_BUILTINS;
2669 built_in_function fcodef128 = END_BUILTINS;
2670 built_in_function fcodef32x = END_BUILTINS;
2671 built_in_function fcodef64x = END_BUILTINS;
2672 built_in_function fcodef128x = END_BUILTINS;
2673
2674 switch (fn)
2675 {
2676 #define SEQ_OF_CASE_MATHFN \
2677 CASE_MATHFN (ACOS) \
2678 CASE_MATHFN (ACOSH) \
2679 CASE_MATHFN (ASIN) \
2680 CASE_MATHFN (ASINH) \
2681 CASE_MATHFN (ATAN) \
2682 CASE_MATHFN (ATAN2) \
2683 CASE_MATHFN (ATANH) \
2684 CASE_MATHFN (CBRT) \
2685 CASE_MATHFN_FLOATN (CEIL) \
2686 CASE_MATHFN (CEXPI) \
2687 CASE_MATHFN_FLOATN (COPYSIGN) \
2688 CASE_MATHFN (COS) \
2689 CASE_MATHFN (COSH) \
2690 CASE_MATHFN (DREM) \
2691 CASE_MATHFN (ERF) \
2692 CASE_MATHFN (ERFC) \
2693 CASE_MATHFN (EXP) \
2694 CASE_MATHFN (EXP10) \
2695 CASE_MATHFN (EXP2) \
2696 CASE_MATHFN (EXPM1) \
2697 CASE_MATHFN (FABS) \
2698 CASE_MATHFN (FDIM) \
2699 CASE_MATHFN_FLOATN (FLOOR) \
2700 CASE_MATHFN_FLOATN (FMA) \
2701 CASE_MATHFN_FLOATN (FMAX) \
2702 CASE_MATHFN_FLOATN (FMIN) \
2703 CASE_MATHFN (FMOD) \
2704 CASE_MATHFN (FREXP) \
2705 CASE_MATHFN (GAMMA) \
2706 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2707 CASE_MATHFN (HUGE_VAL) \
2708 CASE_MATHFN (HYPOT) \
2709 CASE_MATHFN (ILOGB) \
2710 CASE_MATHFN (ICEIL) \
2711 CASE_MATHFN (IFLOOR) \
2712 CASE_MATHFN (INF) \
2713 CASE_MATHFN (IRINT) \
2714 CASE_MATHFN (IROUND) \
2715 CASE_MATHFN (ISINF) \
2716 CASE_MATHFN (J0) \
2717 CASE_MATHFN (J1) \
2718 CASE_MATHFN (JN) \
2719 CASE_MATHFN (LCEIL) \
2720 CASE_MATHFN (LDEXP) \
2721 CASE_MATHFN (LFLOOR) \
2722 CASE_MATHFN (LGAMMA) \
2723 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2724 CASE_MATHFN (LLCEIL) \
2725 CASE_MATHFN (LLFLOOR) \
2726 CASE_MATHFN (LLRINT) \
2727 CASE_MATHFN (LLROUND) \
2728 CASE_MATHFN (LOG) \
2729 CASE_MATHFN (LOG10) \
2730 CASE_MATHFN (LOG1P) \
2731 CASE_MATHFN (LOG2) \
2732 CASE_MATHFN (LOGB) \
2733 CASE_MATHFN (LRINT) \
2734 CASE_MATHFN (LROUND) \
2735 CASE_MATHFN (MODF) \
2736 CASE_MATHFN (NAN) \
2737 CASE_MATHFN (NANS) \
2738 CASE_MATHFN_FLOATN (NEARBYINT) \
2739 CASE_MATHFN (NEXTAFTER) \
2740 CASE_MATHFN (NEXTTOWARD) \
2741 CASE_MATHFN (POW) \
2742 CASE_MATHFN (POWI) \
2743 CASE_MATHFN (POW10) \
2744 CASE_MATHFN (REMAINDER) \
2745 CASE_MATHFN (REMQUO) \
2746 CASE_MATHFN_FLOATN (RINT) \
2747 CASE_MATHFN_FLOATN (ROUND) \
2748 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2749 CASE_MATHFN (SCALB) \
2750 CASE_MATHFN (SCALBLN) \
2751 CASE_MATHFN (SCALBN) \
2752 CASE_MATHFN (SIGNBIT) \
2753 CASE_MATHFN (SIGNIFICAND) \
2754 CASE_MATHFN (SIN) \
2755 CASE_MATHFN (SINCOS) \
2756 CASE_MATHFN (SINH) \
2757 CASE_MATHFN_FLOATN (SQRT) \
2758 CASE_MATHFN (TAN) \
2759 CASE_MATHFN (TANH) \
2760 CASE_MATHFN (TGAMMA) \
2761 CASE_MATHFN_FLOATN (TRUNC) \
2762 CASE_MATHFN (Y0) \
2763 CASE_MATHFN (Y1) \
2764 CASE_MATHFN (YN)
2765
2766 SEQ_OF_CASE_MATHFN
2767
2768 default:
2769 return END_BUILTINS;
2770 }
2771
2772 mtype = TYPE_MAIN_VARIANT (type);
2773 if (mtype == double_type_node)
2774 return fcode;
2775 else if (mtype == float_type_node)
2776 return fcodef;
2777 else if (mtype == long_double_type_node)
2778 return fcodel;
2779 else if (mtype == float16_type_node)
2780 return fcodef16;
2781 else if (mtype == float32_type_node)
2782 return fcodef32;
2783 else if (mtype == float64_type_node)
2784 return fcodef64;
2785 else if (mtype == float128_type_node)
2786 return fcodef128;
2787 else if (mtype == float32x_type_node)
2788 return fcodef32x;
2789 else if (mtype == float64x_type_node)
2790 return fcodef64x;
2791 else if (mtype == float128x_type_node)
2792 return fcodef128x;
2793 else
2794 return END_BUILTINS;
2795 }
2796
2797 #undef CASE_MATHFN
2798 #undef CASE_MATHFN_FLOATN
2799 #undef CASE_MATHFN_REENT
2800
2801 /* Return the math function equivalent to FN but operating directly on TYPE,
2802 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2803 otherwise use the explicit declaration. If we can't do the conversion,
2804 return null. */
2805
2806 static tree
2807 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2808 {
2809 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2810 if (fcode2 == END_BUILTINS)
2811 return NULL_TREE;
2812
2813 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2814 return NULL_TREE;
2815
2816 return builtin_decl_explicit (fcode2);
2817 }
2818
2819 /* Like mathfn_built_in_1, but always use the implicit declarations.  */
2820
2821 tree
2822 mathfn_built_in (tree type, combined_fn fn)
2823 {
2824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2825 }
2826
2827 /* Like mathfn_built_in_1, but take a built_in_function and
2828    always use the implicit declarations.  */
2829
2830 tree
2831 mathfn_built_in (tree type, enum built_in_function fn)
2832 {
2833 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2834 }
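
/* For example (illustration only), mathfn_built_in (long_double_type_node,
   BUILT_IN_SQRT) yields the declaration of sqrtl when that builtin is
   implicitly available, and NULL_TREE otherwise.  */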
2835
2836 /* Return the type associated with a built-in function, i.e., the one
2837 to be passed to mathfn_built_in to get the type-specific
2838 function. */
2839
2840 tree
2841 mathfn_built_in_type (combined_fn fn)
2842 {
2843 #define CASE_MATHFN(MATHFN) \
2844 case CFN_BUILT_IN_##MATHFN: \
2845 return double_type_node; \
2846 case CFN_BUILT_IN_##MATHFN##F: \
2847 return float_type_node; \
2848 case CFN_BUILT_IN_##MATHFN##L: \
2849 return long_double_type_node;
2850
2851 #define CASE_MATHFN_FLOATN(MATHFN) \
2852 CASE_MATHFN(MATHFN) \
2853 case CFN_BUILT_IN_##MATHFN##F16: \
2854 return float16_type_node; \
2855 case CFN_BUILT_IN_##MATHFN##F32: \
2856 return float32_type_node; \
2857 case CFN_BUILT_IN_##MATHFN##F64: \
2858 return float64_type_node; \
2859 case CFN_BUILT_IN_##MATHFN##F128: \
2860 return float128_type_node; \
2861 case CFN_BUILT_IN_##MATHFN##F32X: \
2862 return float32x_type_node; \
2863 case CFN_BUILT_IN_##MATHFN##F64X: \
2864 return float64x_type_node; \
2865 case CFN_BUILT_IN_##MATHFN##F128X: \
2866 return float128x_type_node;
2867
2868 /* Similar to above, but appends _R after any F/L suffix. */
2869 #define CASE_MATHFN_REENT(MATHFN) \
2870 case CFN_BUILT_IN_##MATHFN##_R: \
2871 return double_type_node; \
2872 case CFN_BUILT_IN_##MATHFN##F_R: \
2873 return float_type_node; \
2874 case CFN_BUILT_IN_##MATHFN##L_R: \
2875 return long_double_type_node;
2876
2877 switch (fn)
2878 {
2879 SEQ_OF_CASE_MATHFN
2880
2881 default:
2882 return NULL_TREE;
2883 }
2884
2885 #undef CASE_MATHFN
2886 #undef CASE_MATHFN_FLOATN
2887 #undef CASE_MATHFN_REENT
2888 #undef SEQ_OF_CASE_MATHFN
2889 }
2890
2891 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2892 return its code, otherwise return IFN_LAST. Note that this function
2893    only tests whether the function is defined in internal-fn.def, not whether
2894 it is actually available on the target. */
2895
2896 internal_fn
2897 associated_internal_fn (tree fndecl)
2898 {
2899 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2900 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2901 switch (DECL_FUNCTION_CODE (fndecl))
2902 {
2903 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2904 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2905 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2906 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2907 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2908 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2909 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2910 #include "internal-fn.def"
2911
2912 CASE_FLT_FN (BUILT_IN_POW10):
2913 return IFN_EXP10;
2914
2915 CASE_FLT_FN (BUILT_IN_DREM):
2916 return IFN_REMAINDER;
2917
2918 CASE_FLT_FN (BUILT_IN_SCALBN):
2919 CASE_FLT_FN (BUILT_IN_SCALBLN):
2920 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2921 return IFN_LDEXP;
2922 return IFN_LAST;
2923
2924 default:
2925 return IFN_LAST;
2926 }
2927 }
2928
2929 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2930 on the current target by a call to an internal function, return the
2931 code of that internal function, otherwise return IFN_LAST. The caller
2932 is responsible for ensuring that any side-effects of the built-in
2933 call are dealt with correctly. E.g. if CALL sets errno, the caller
2934 must decide that the errno result isn't needed or make it available
2935 in some other way. */
2936
2937 internal_fn
2938 replacement_internal_fn (gcall *call)
2939 {
2940 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2941 {
2942 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2943 if (ifn != IFN_LAST)
2944 {
2945 tree_pair types = direct_internal_fn_types (ifn, call);
2946 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2947 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2948 return ifn;
2949 }
2950 }
2951 return IFN_LAST;
2952 }
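
/* For example (illustration only), a GIMPLE call to sqrt on a target
   with a direct square-root instruction yields IFN_SQRT here, allowing
   the caller to replace the library call, provided it deals with the
   errno side effect as described above.  */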
2953
2954 /* Expand a call to the builtin ternary math functions (fma).
2955 Return NULL_RTX if a normal call should be emitted rather than expanding the
2956 function in-line. EXP is the expression that is a call to the builtin
2957 function; if convenient, the result should be placed in TARGET.
2958 SUBTARGET may be used as the target for computing one of EXP's
2959 operands. */
2960
2961 static rtx
2962 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2963 {
2964 optab builtin_optab;
2965 rtx op0, op1, op2, result;
2966 rtx_insn *insns;
2967 tree fndecl = get_callee_fndecl (exp);
2968 tree arg0, arg1, arg2;
2969 machine_mode mode;
2970
2971 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2973
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2976 arg2 = CALL_EXPR_ARG (exp, 2);
2977
2978 switch (DECL_FUNCTION_CODE (fndecl))
2979 {
2980 CASE_FLT_FN (BUILT_IN_FMA):
2981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2982 builtin_optab = fma_optab; break;
2983 default:
2984 gcc_unreachable ();
2985 }
2986
2987 /* Make a suitable register to place result in. */
2988 mode = TYPE_MODE (TREE_TYPE (exp));
2989
2990 /* Before working hard, check whether the instruction is available. */
2991 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2992 return NULL_RTX;
2993
2994 result = gen_reg_rtx (mode);
2995
2996 /* Always stabilize the argument list. */
2997 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2998 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2999 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3000
3001 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3002 op1 = expand_normal (arg1);
3003 op2 = expand_normal (arg2);
3004
3005 start_sequence ();
3006
3007 /* Compute into RESULT.
3008 Set RESULT to wherever the result comes back. */
3009 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3010 result, 0);
3011
3012 /* If we were unable to expand via the builtin, stop the sequence
3013      (without outputting the insns) and call the library function
3014 with the stabilized argument list. */
3015 if (result == 0)
3016 {
3017 end_sequence ();
3018 return expand_call (exp, target, target == const0_rtx);
3019 }
3020
3021 /* Output the entire sequence. */
3022 insns = get_insns ();
3023 end_sequence ();
3024 emit_insn (insns);
3025
3026 return result;
3027 }
3028
3029 /* Expand a call to the builtin sin and cos math functions.
3030 Return NULL_RTX if a normal call should be emitted rather than expanding the
3031 function in-line. EXP is the expression that is a call to the builtin
3032 function; if convenient, the result should be placed in TARGET.
3033 SUBTARGET may be used as the target for computing one of EXP's
3034 operands. */
3035
3036 static rtx
3037 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3038 {
3039 optab builtin_optab;
3040 rtx op0;
3041 rtx_insn *insns;
3042 tree fndecl = get_callee_fndecl (exp);
3043 machine_mode mode;
3044 tree arg;
3045
3046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3048
3049 arg = CALL_EXPR_ARG (exp, 0);
3050
3051 switch (DECL_FUNCTION_CODE (fndecl))
3052 {
3053 CASE_FLT_FN (BUILT_IN_SIN):
3054 CASE_FLT_FN (BUILT_IN_COS):
3055 builtin_optab = sincos_optab; break;
3056 default:
3057 gcc_unreachable ();
3058 }
3059
3060 /* Make a suitable register to place result in. */
3061 mode = TYPE_MODE (TREE_TYPE (exp));
3062
3063   /* Check if the sincos insn is available, otherwise fall back
3064      to the sin or cos insn.  */
3065 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3066 switch (DECL_FUNCTION_CODE (fndecl))
3067 {
3068 CASE_FLT_FN (BUILT_IN_SIN):
3069 builtin_optab = sin_optab; break;
3070 CASE_FLT_FN (BUILT_IN_COS):
3071 builtin_optab = cos_optab; break;
3072 default:
3073 gcc_unreachable ();
3074 }
3075
3076 /* Before working hard, check whether the instruction is available. */
3077 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3078 {
3079 rtx result = gen_reg_rtx (mode);
3080
3081 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3082 need to expand the argument again. This way, we will not perform
3083 	 side-effects more than once.  */
3084 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3085
3086 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3087
3088 start_sequence ();
3089
3090 /* Compute into RESULT.
3091 Set RESULT to wherever the result comes back. */
3092 if (builtin_optab == sincos_optab)
3093 {
3094 int ok;
3095
3096 switch (DECL_FUNCTION_CODE (fndecl))
3097 {
3098 CASE_FLT_FN (BUILT_IN_SIN):
3099 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3100 break;
3101 CASE_FLT_FN (BUILT_IN_COS):
3102 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3103 break;
3104 default:
3105 gcc_unreachable ();
3106 }
3107 gcc_assert (ok);
3108 }
3109 else
3110 result = expand_unop (mode, builtin_optab, op0, result, 0);
3111
3112 if (result != 0)
3113 {
3114 /* Output the entire sequence. */
3115 insns = get_insns ();
3116 end_sequence ();
3117 emit_insn (insns);
3118 return result;
3119 }
3120
3121 /* If we were unable to expand via the builtin, stop the sequence
3122 	 (without outputting the insns) and call the library function
3123 with the stabilized argument list. */
3124 end_sequence ();
3125 }
3126
3127 return expand_call (exp, target, target == const0_rtx);
3128 }
3129
3130 /* Given an interclass math builtin decl FNDECL and its argument ARG,
3131 return an RTL instruction code that implements the functionality.
3132 If that isn't possible or available return CODE_FOR_nothing. */
3133
3134 static enum insn_code
3135 interclass_mathfn_icode (tree arg, tree fndecl)
3136 {
3137 bool errno_set = false;
3138 optab builtin_optab = unknown_optab;
3139 machine_mode mode;
3140
3141 switch (DECL_FUNCTION_CODE (fndecl))
3142 {
3143 CASE_FLT_FN (BUILT_IN_ILOGB):
3144 errno_set = true; builtin_optab = ilogb_optab; break;
3145 CASE_FLT_FN (BUILT_IN_ISINF):
3146 builtin_optab = isinf_optab; break;
3147 case BUILT_IN_ISNORMAL:
3148 case BUILT_IN_ISFINITE:
3149 CASE_FLT_FN (BUILT_IN_FINITE):
3150 case BUILT_IN_FINITED32:
3151 case BUILT_IN_FINITED64:
3152 case BUILT_IN_FINITED128:
3153 case BUILT_IN_ISINFD32:
3154 case BUILT_IN_ISINFD64:
3155 case BUILT_IN_ISINFD128:
3156 /* These builtins have no optabs (yet). */
3157 break;
3158 default:
3159 gcc_unreachable ();
3160 }
3161
3162   /* There's no easy way to detect the case where we need to set EDOM.  */
3163 if (flag_errno_math && errno_set)
3164 return CODE_FOR_nothing;
3165
3166 /* Optab mode depends on the mode of the input argument. */
3167 mode = TYPE_MODE (TREE_TYPE (arg));
3168
3169 if (builtin_optab)
3170 return optab_handler (builtin_optab, mode);
3171 return CODE_FOR_nothing;
3172 }
3173
3174 /* Expand a call to one of the builtin math functions that operate on
3175    a floating-point argument and output an integer result (ilogb, isinf,
3176    isnan, etc.).
3177 Return 0 if a normal call should be emitted rather than expanding the
3178 function in-line. EXP is the expression that is a call to the builtin
3179 function; if convenient, the result should be placed in TARGET. */
3180
3181 static rtx
3182 expand_builtin_interclass_mathfn (tree exp, rtx target)
3183 {
3184 enum insn_code icode = CODE_FOR_nothing;
3185 rtx op0;
3186 tree fndecl = get_callee_fndecl (exp);
3187 machine_mode mode;
3188 tree arg;
3189
3190 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3191 return NULL_RTX;
3192
3193 arg = CALL_EXPR_ARG (exp, 0);
3194 icode = interclass_mathfn_icode (arg, fndecl);
3195 mode = TYPE_MODE (TREE_TYPE (arg));
3196
3197 if (icode != CODE_FOR_nothing)
3198 {
3199 class expand_operand ops[1];
3200 rtx_insn *last = get_last_insn ();
3201 tree orig_arg = arg;
3202
3203 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3204 need to expand the argument again. This way, we will not perform
3205 	 side-effects more than once.  */
3206 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3207
3208 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3209
3210 if (mode != GET_MODE (op0))
3211 op0 = convert_to_mode (mode, op0, 0);
3212
3213 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3214 if (maybe_legitimize_operands (icode, 0, 1, ops)
3215 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3216 return ops[0].value;
3217
3218 delete_insns_since (last);
3219 CALL_EXPR_ARG (exp, 0) = orig_arg;
3220 }
3221
3222 return NULL_RTX;
3223 }
3224
3225 /* Expand a call to the builtin sincos math function.
3226 Return NULL_RTX if a normal call should be emitted rather than expanding the
3227 function in-line. EXP is the expression that is a call to the builtin
3228 function. */
3229
3230 static rtx
3231 expand_builtin_sincos (tree exp)
3232 {
3233 rtx op0, op1, op2, target1, target2;
3234 machine_mode mode;
3235 tree arg, sinp, cosp;
3236 int result;
3237 location_t loc = EXPR_LOCATION (exp);
3238 tree alias_type, alias_off;
3239
3240 if (!validate_arglist (exp, REAL_TYPE,
3241 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3243
3244 arg = CALL_EXPR_ARG (exp, 0);
3245 sinp = CALL_EXPR_ARG (exp, 1);
3246 cosp = CALL_EXPR_ARG (exp, 2);
3247
3248 /* Make a suitable register to place result in. */
3249 mode = TYPE_MODE (TREE_TYPE (arg));
3250
3251 /* Check if sincos insn is available, otherwise emit the call. */
3252 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3253 return NULL_RTX;
3254
3255 target1 = gen_reg_rtx (mode);
3256 target2 = gen_reg_rtx (mode);
3257
3258 op0 = expand_normal (arg);
3259 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3260 alias_off = build_int_cst (alias_type, 0);
3261 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3262 sinp, alias_off));
3263 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3264 cosp, alias_off));
3265
3266   /* Compute into target1 (the sine) and target2 (the cosine);
3267      the results come back in those registers.  */
3268 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3269 gcc_assert (result);
3270
3271 /* Move target1 and target2 to the memory locations indicated
3272 by op1 and op2. */
3273 emit_move_insn (op1, target1);
3274 emit_move_insn (op2, target2);
3275
3276 return const0_rtx;
3277 }
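
/* This handles user code of the form (illustration only)

     double s, c;
     sincos (x, &s, &c);

   when the target provides the sincos optab pattern; otherwise NULL_RTX
   is returned above and a normal library call is emitted instead.  */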
3278
3279 /* Expand a call to the internal cexpi builtin to the sincos math function.
3280 EXP is the expression that is a call to the builtin function; if convenient,
3281 the result should be placed in TARGET. */
3282
3283 static rtx
3284 expand_builtin_cexpi (tree exp, rtx target)
3285 {
3286 tree fndecl = get_callee_fndecl (exp);
3287 tree arg, type;
3288 machine_mode mode;
3289 rtx op0, op1, op2;
3290 location_t loc = EXPR_LOCATION (exp);
3291
3292 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3293 return NULL_RTX;
3294
3295 arg = CALL_EXPR_ARG (exp, 0);
3296 type = TREE_TYPE (arg);
3297 mode = TYPE_MODE (TREE_TYPE (arg));
3298
3299   /* Try expanding via a sincos optab, and fall back to emitting a libcall
3300      to sincos or cexp.  We know one of them is available because cexpi
3301      is only generated when sincos or cexp is.  */
3302 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3303 {
3304 op1 = gen_reg_rtx (mode);
3305 op2 = gen_reg_rtx (mode);
3306
3307 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3308
3309 /* Compute into op1 and op2. */
3310 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3311 }
3312 else if (targetm.libc_has_function (function_sincos, type))
3313 {
3314 tree call, fn = NULL_TREE;
3315 tree top1, top2;
3316 rtx op1a, op2a;
3317
3318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3319 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3320 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3321 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3322 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3323 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3324 else
3325 gcc_unreachable ();
3326
3327 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3328 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3329 op1a = copy_addr_to_reg (XEXP (op1, 0));
3330 op2a = copy_addr_to_reg (XEXP (op2, 0));
3331 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3332 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3333
3334 /* Make sure not to fold the sincos call again. */
3335 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3336 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3337 call, 3, arg, top1, top2));
3338 }
3339 else
3340 {
3341 tree call, fn = NULL_TREE, narg;
3342 tree ctype = build_complex_type (type);
3343
3344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3345 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3346 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3347 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3348 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3349 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3350 else
3351 gcc_unreachable ();
3352
3353       /* If we don't have a decl for cexp, create one.  This is the
3354 friendliest fallback if the user calls __builtin_cexpi
3355 without full target C99 function support. */
3356 if (fn == NULL_TREE)
3357 {
3358 tree fntype;
3359 const char *name = NULL;
3360
3361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3362 name = "cexpf";
3363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3364 name = "cexp";
3365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3366 name = "cexpl";
3367
3368 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3369 fn = build_fn_decl (name, fntype);
3370 }
3371
3372 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3373 build_real (type, dconst0), arg);
3374
3375 /* Make sure not to fold the cexp call again. */
3376 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3377 return expand_expr (build_call_nary (ctype, call, 1, narg),
3378 target, VOIDmode, EXPAND_NORMAL);
3379 }
3380
3381 /* Now build the proper return type. */
3382 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3383 make_tree (TREE_TYPE (arg), op2),
3384 make_tree (TREE_TYPE (arg), op1)),
3385 target, VOIDmode, EXPAND_NORMAL);
3386 }
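
/* For illustration only: on a target whose C library provides sincos,
a call such as

_Complex double r = __builtin_cexpi (x);

is expanded roughly as if the source had been

double s, c;
sincos (x, &s, &c);
__real__ r = c;
__imag__ r = s;

with the complex result assembled from the two temporaries; the sincos
optab and cexp fallbacks above follow the same pattern. */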
3387
3388 /* Conveniently construct a function call expression. FNDECL names the
3389 function to be called, N is the number of arguments, and the "..."
3390 parameters are the argument expressions. Unlike build_call_expr
3391 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3392
3393 static tree
3394 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3395 {
3396 va_list ap;
3397 tree fntype = TREE_TYPE (fndecl);
3398 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3399
3400 va_start (ap, n);
3401 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3402 va_end (ap);
3403 SET_EXPR_LOCATION (fn, loc);
3404 return fn;
3405 }
3406
3407 /* Expand a call to one of the builtin rounding functions gcc defines
3408 as an extension (lfloor and lceil). As these are gcc extensions we
3409 do not need to worry about setting errno to EDOM.
3410 If expanding via optab fails, lower expression to (int)(floor(x)).
3411 EXP is the expression that is a call to the builtin function;
3412 if convenient, the result should be placed in TARGET. */
3413
3414 static rtx
3415 expand_builtin_int_roundingfn (tree exp, rtx target)
3416 {
3417 convert_optab builtin_optab;
3418 rtx op0, tmp;
3419 rtx_insn *insns;
3420 tree fndecl = get_callee_fndecl (exp);
3421 enum built_in_function fallback_fn;
3422 tree fallback_fndecl;
3423 machine_mode mode;
3424 tree arg;
3425
3426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3427 return NULL_RTX;
3428
3429 arg = CALL_EXPR_ARG (exp, 0);
3430
3431 switch (DECL_FUNCTION_CODE (fndecl))
3432 {
3433 CASE_FLT_FN (BUILT_IN_ICEIL):
3434 CASE_FLT_FN (BUILT_IN_LCEIL):
3435 CASE_FLT_FN (BUILT_IN_LLCEIL):
3436 builtin_optab = lceil_optab;
3437 fallback_fn = BUILT_IN_CEIL;
3438 break;
3439
3440 CASE_FLT_FN (BUILT_IN_IFLOOR):
3441 CASE_FLT_FN (BUILT_IN_LFLOOR):
3442 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3443 builtin_optab = lfloor_optab;
3444 fallback_fn = BUILT_IN_FLOOR;
3445 break;
3446
3447 default:
3448 gcc_unreachable ();
3449 }
3450
3451 /* Make a suitable register to place result in. */
3452 mode = TYPE_MODE (TREE_TYPE (exp));
3453
3454 target = gen_reg_rtx (mode);
3455
3456 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3457 need to expand the argument again. This way, we will not perform
3458 side-effects more than once. */
3459 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3460
3461 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3462
3463 start_sequence ();
3464
3465 /* Compute into TARGET. */
3466 if (expand_sfix_optab (target, op0, builtin_optab))
3467 {
3468 /* Output the entire sequence. */
3469 insns = get_insns ();
3470 end_sequence ();
3471 emit_insn (insns);
3472 return target;
3473 }
3474
3475 /* If we were unable to expand via the builtin, stop the sequence
3476 (without outputting the insns). */
3477 end_sequence ();
3478
3479 /* Fall back to floating point rounding optab. */
3480 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3481
3482 /* For non-C99 targets we may end up without a fallback fndecl here
3483 if the user called __builtin_lfloor directly. In this case emit
3484 a call to the floor/ceil variants nevertheless. This should result
3485 in the best user experience for targets without full C99 support. */
3486 if (fallback_fndecl == NULL_TREE)
3487 {
3488 tree fntype;
3489 const char *name = NULL;
3490
3491 switch (DECL_FUNCTION_CODE (fndecl))
3492 {
3493 case BUILT_IN_ICEIL:
3494 case BUILT_IN_LCEIL:
3495 case BUILT_IN_LLCEIL:
3496 name = "ceil";
3497 break;
3498 case BUILT_IN_ICEILF:
3499 case BUILT_IN_LCEILF:
3500 case BUILT_IN_LLCEILF:
3501 name = "ceilf";
3502 break;
3503 case BUILT_IN_ICEILL:
3504 case BUILT_IN_LCEILL:
3505 case BUILT_IN_LLCEILL:
3506 name = "ceill";
3507 break;
3508 case BUILT_IN_IFLOOR:
3509 case BUILT_IN_LFLOOR:
3510 case BUILT_IN_LLFLOOR:
3511 name = "floor";
3512 break;
3513 case BUILT_IN_IFLOORF:
3514 case BUILT_IN_LFLOORF:
3515 case BUILT_IN_LLFLOORF:
3516 name = "floorf";
3517 break;
3518 case BUILT_IN_IFLOORL:
3519 case BUILT_IN_LFLOORL:
3520 case BUILT_IN_LLFLOORL:
3521 name = "floorl";
3522 break;
3523 default:
3524 gcc_unreachable ();
3525 }
3526
3527 fntype = build_function_type_list (TREE_TYPE (arg),
3528 TREE_TYPE (arg), NULL_TREE);
3529 fallback_fndecl = build_fn_decl (name, fntype);
3530 }
3531
3532 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3533
3534 tmp = expand_normal (exp);
3535 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3536
3537 /* Truncate the result of floating point optab to integer
3538 via expand_fix (). */
3539 target = gen_reg_rtx (mode);
3540 expand_fix (target, tmp, 0);
3541
3542 return target;
3543 }
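
/* A sketch of the fallback path above: when the lfloor optab is not
available for the mode, a call such as

long n = __builtin_lfloor (x);

is expanded roughly as

long n = (long) floor (x);

i.e. a call to the floating-point rounding function followed by
a conversion via expand_fix (). */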
3544
3545 /* Expand a call to one of the builtin math functions doing integer
3546 conversion (lrint).
3547 Return 0 if a normal call should be emitted rather than expanding the
3548 function in-line. EXP is the expression that is a call to the builtin
3549 function; if convenient, the result should be placed in TARGET. */
3550
3551 static rtx
3552 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3553 {
3554 convert_optab builtin_optab;
3555 rtx op0;
3556 rtx_insn *insns;
3557 tree fndecl = get_callee_fndecl (exp);
3558 tree arg;
3559 machine_mode mode;
3560 enum built_in_function fallback_fn = BUILT_IN_NONE;
3561
3562 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3563 return NULL_RTX;
3564
3565 arg = CALL_EXPR_ARG (exp, 0);
3566
3567 switch (DECL_FUNCTION_CODE (fndecl))
3568 {
3569 CASE_FLT_FN (BUILT_IN_IRINT):
3570 fallback_fn = BUILT_IN_LRINT;
3571 gcc_fallthrough ();
3572 CASE_FLT_FN (BUILT_IN_LRINT):
3573 CASE_FLT_FN (BUILT_IN_LLRINT):
3574 builtin_optab = lrint_optab;
3575 break;
3576
3577 CASE_FLT_FN (BUILT_IN_IROUND):
3578 fallback_fn = BUILT_IN_LROUND;
3579 gcc_fallthrough ();
3580 CASE_FLT_FN (BUILT_IN_LROUND):
3581 CASE_FLT_FN (BUILT_IN_LLROUND):
3582 builtin_optab = lround_optab;
3583 break;
3584
3585 default:
3586 gcc_unreachable ();
3587 }
3588
3589 /* There's no easy way to detect the case we need to set EDOM. */
3590 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3591 return NULL_RTX;
3592
3593 /* Make a suitable register to place result in. */
3594 mode = TYPE_MODE (TREE_TYPE (exp));
3595
3596 /* There's no easy way to detect the case we need to set EDOM. */
3597 if (!flag_errno_math)
3598 {
3599 rtx result = gen_reg_rtx (mode);
3600
3601 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3602 need to expand the argument again. This way, we will not perform
3603 side-effects more than once. */
3604 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3605
3606 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3607
3608 start_sequence ();
3609
3610 if (expand_sfix_optab (result, op0, builtin_optab))
3611 {
3612 /* Output the entire sequence. */
3613 insns = get_insns ();
3614 end_sequence ();
3615 emit_insn (insns);
3616 return result;
3617 }
3618
3619 /* If we were unable to expand via the builtin, stop the sequence
3620 (without outputting the insns) and call to the library function
3621 with the stabilized argument list. */
3622 end_sequence ();
3623 }
3624
3625 if (fallback_fn != BUILT_IN_NONE)
3626 {
3627 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3628 targets, (int) round (x) should never be transformed into
3629 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3630 a call to lround in the hope that the target provides at least some
3631 C99 functions. This should result in the best user experience for
3632 targets without full C99 support. */
3633 tree fallback_fndecl = mathfn_built_in_1
3634 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3635
3636 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3637 fallback_fndecl, 1, arg);
3638
3639 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3640 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3641 return convert_to_mode (mode, target, 0);
3642 }
3643
3644 return expand_call (exp, target, target == const0_rtx);
3645 }
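
/* Similarly, when neither the lround optab nor the errno requirements
allow an inline expansion, a call such as

int n = __builtin_iround (x);

may fall back to roughly

int n = (int) lround (x);

relying on the target providing at least the long variant. */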
3646
3647 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3648 a normal call should be emitted rather than expanding the function
3649 in-line. EXP is the expression that is a call to the builtin
3650 function; if convenient, the result should be placed in TARGET. */
3651
3652 static rtx
3653 expand_builtin_powi (tree exp, rtx target)
3654 {
3655 tree arg0, arg1;
3656 rtx op0, op1;
3657 machine_mode mode;
3658 machine_mode mode2;
3659
3660 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3661 return NULL_RTX;
3662
3663 arg0 = CALL_EXPR_ARG (exp, 0);
3664 arg1 = CALL_EXPR_ARG (exp, 1);
3665 mode = TYPE_MODE (TREE_TYPE (exp));
3666
3667 /* Emit a libcall to libgcc. */
3668
3669 /* Mode of the 2nd argument must match that of an int. */
3670 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3671
3672 if (target == NULL_RTX)
3673 target = gen_reg_rtx (mode);
3674
3675 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3676 if (GET_MODE (op0) != mode)
3677 op0 = convert_to_mode (mode, op0, 0);
3678 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3679 if (GET_MODE (op1) != mode2)
3680 op1 = convert_to_mode (mode2, op1, 0);
3681
3682 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3683 target, LCT_CONST, mode,
3684 op0, mode, op1, mode2);
3685
3686 return target;
3687 }
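
/* For example, with double operands

double y = __builtin_powi (x, n);

is emitted as a libgcc call roughly equivalent to

double y = __powidf2 (x, n);

with the second operand first converted to the mode of an int. */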
3688
3689 /* Expand expression EXP which is a call to the strlen builtin. Return
3690 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3691 try to get the result in TARGET, if convenient. */
3692
3693 static rtx
3694 expand_builtin_strlen (tree exp, rtx target,
3695 machine_mode target_mode)
3696 {
3697 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3698 return NULL_RTX;
3699
3700 tree src = CALL_EXPR_ARG (exp, 0);
3701 if (!check_read_access (exp, src))
3702 return NULL_RTX;
3703
3704 /* If the length can be computed at compile-time, return it. */
3705 if (tree len = c_strlen (src, 0))
3706 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3707
3708 /* If the length can be computed at compile-time and is a constant
3709 integer, but there are side-effects in src, evaluate
3710 src for side-effects, then return len.
3711 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3712 can be optimized into: i++; x = 3; */
3713 tree len = c_strlen (src, 1);
3714 if (len && TREE_CODE (len) == INTEGER_CST)
3715 {
3716 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3717 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3718 }
3719
3720 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3721
3722 /* If SRC is not a pointer type, don't do this operation inline. */
3723 if (align == 0)
3724 return NULL_RTX;
3725
3726 /* Bail out if we can't compute strlen in the right mode. */
3727 machine_mode insn_mode;
3728 enum insn_code icode = CODE_FOR_nothing;
3729 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3730 {
3731 icode = optab_handler (strlen_optab, insn_mode);
3732 if (icode != CODE_FOR_nothing)
3733 break;
3734 }
3735 if (insn_mode == VOIDmode)
3736 return NULL_RTX;
3737
3738 /* Make a place to hold the source address. We will not expand
3739 the actual source until we are sure that the expansion will
3740 not fail -- there are trees that cannot be expanded twice. */
3741 rtx src_reg = gen_reg_rtx (Pmode);
3742
3743 /* Mark the beginning of the strlen sequence so we can emit the
3744 source operand later. */
3745 rtx_insn *before_strlen = get_last_insn ();
3746
3747 class expand_operand ops[4];
3748 create_output_operand (&ops[0], target, insn_mode);
3749 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3750 create_integer_operand (&ops[2], 0);
3751 create_integer_operand (&ops[3], align);
3752 if (!maybe_expand_insn (icode, 4, ops))
3753 return NULL_RTX;
3754
3755 /* Check to see if the argument was declared attribute nonstring
3756 and if so, issue a warning since at this point it's not known
3757 to be nul-terminated. */
3758 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3759
3760 /* Now that we are assured of success, expand the source. */
3761 start_sequence ();
3762 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3763 if (pat != src_reg)
3764 {
3765 #ifdef POINTERS_EXTEND_UNSIGNED
3766 if (GET_MODE (pat) != Pmode)
3767 pat = convert_to_mode (Pmode, pat,
3768 POINTERS_EXTEND_UNSIGNED);
3769 #endif
3770 emit_move_insn (src_reg, pat);
3771 }
3772 pat = get_insns ();
3773 end_sequence ();
3774
3775 if (before_strlen)
3776 emit_insn_after (pat, before_strlen);
3777 else
3778 emit_insn_before (pat, get_insns ());
3779
3780 /* Return the value in the proper mode for this function. */
3781 if (GET_MODE (ops[0].value) == target_mode)
3782 target = ops[0].value;
3783 else if (target != 0)
3784 convert_move (target, ops[0].value, 0);
3785 else
3786 target = convert_to_mode (target_mode, ops[0].value, 0);
3787
3788 return target;
3789 }
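
/* For illustration: a call like

size_t n = strlen ("hello");

is folded to the constant 5 above, while

size_t n = strlen (p);

is expanded via the strlen optab when the target provides one and
otherwise left for a normal library call. */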
3790
3791 /* Expand call EXP to the strnlen built-in, returning the result
3792 and setting it in TARGET if convenient. Return NULL_RTX on failure. */
3793
3794 static rtx
3795 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3796 {
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3799
3800 tree src = CALL_EXPR_ARG (exp, 0);
3801 tree bound = CALL_EXPR_ARG (exp, 1);
3802
3803 if (!bound)
3804 return NULL_RTX;
3805
3806 check_read_access (exp, src, bound);
3807
3808 location_t loc = UNKNOWN_LOCATION;
3809 if (EXPR_HAS_LOCATION (exp))
3810 loc = EXPR_LOCATION (exp);
3811
3812 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3813 so these conversions aren't necessary. */
3814 c_strlen_data lendata = { };
3815 tree len = c_strlen (src, 0, &lendata, 1);
3816 if (len)
3817 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3818
3819 if (TREE_CODE (bound) == INTEGER_CST)
3820 {
3821 if (!len)
3822 return NULL_RTX;
3823
3824 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3825 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3826 }
3827
3828 if (TREE_CODE (bound) != SSA_NAME)
3829 return NULL_RTX;
3830
3831 wide_int min, max;
3832 enum value_range_kind rng = get_range_info (bound, &min, &max);
3833 if (rng != VR_RANGE)
3834 return NULL_RTX;
3835
3836 if (!len || TREE_CODE (len) != INTEGER_CST)
3837 {
3838 bool exact;
3839 lendata.decl = unterminated_array (src, &len, &exact);
3840 if (!lendata.decl)
3841 return NULL_RTX;
3842 }
3843
3844 if (lendata.decl)
3845 return NULL_RTX;
3846
3847 if (wi::gtu_p (min, wi::to_wide (len)))
3848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3849
3850 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3851 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3852 }
3853
3854 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3855 bytes from the bytes at DATA + OFFSET and return them reinterpreted as
3856 a target constant. */
3857
3858 static rtx
3859 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3860 scalar_int_mode mode)
3861 {
3862 /* The REPresentation pointed to by DATA need not be a nul-terminated
3863 string but the caller guarantees it's large enough for MODE. */
3864 const char *rep = (const char *) data;
3865
3866 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3867 }
3868
3869 /* LEN specifies the length of the block for the memcpy/memset operation.
3870 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3871 In some cases we can make a very likely guess at the maximum size, which
3872 we then set into PROBABLE_MAX_SIZE. */
3873
3874 static void
3875 determine_block_size (tree len, rtx len_rtx,
3876 unsigned HOST_WIDE_INT *min_size,
3877 unsigned HOST_WIDE_INT *max_size,
3878 unsigned HOST_WIDE_INT *probable_max_size)
3879 {
3880 if (CONST_INT_P (len_rtx))
3881 {
3882 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3883 return;
3884 }
3885 else
3886 {
3887 wide_int min, max;
3888 enum value_range_kind range_type = VR_UNDEFINED;
3889
3890 /* Determine bounds from the type. */
3891 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3892 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3893 else
3894 *min_size = 0;
3895 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3896 *probable_max_size = *max_size
3897 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3898 else
3899 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3900
3901 if (TREE_CODE (len) == SSA_NAME)
3902 range_type = get_range_info (len, &min, &max);
3903 if (range_type == VR_RANGE)
3904 {
3905 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3906 *min_size = min.to_uhwi ();
3907 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3908 *probable_max_size = *max_size = max.to_uhwi ();
3909 }
3910 else if (range_type == VR_ANTI_RANGE)
3911 {
3912 /* Code like
3913
3914 int n;
3915 if (n < 100)
3916 memcpy (a, b, n)
3917
3918 produces an anti-range allowing negative values of N. We can still
3919 use the information and make a guess that N is not negative.
3920 */
3921 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3922 *probable_max_size = min.to_uhwi () - 1;
3923 }
3924 }
3925 gcc_checking_assert (*max_size <=
3926 (unsigned HOST_WIDE_INT)
3927 GET_MODE_MASK (GET_MODE (len_rtx)));
3928 }
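
/* A companion example to the anti-range case above: with

void f (unsigned n, char *a, const char *b)
{
if (n < 100)
memcpy (a, b, n);
}

the VR_RANGE on N typically yields MIN_SIZE == 0 and
MAX_SIZE == PROBABLE_MAX_SIZE == 99. */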
3929
3930 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3931 accessing an object of SIZE bytes. */
3932
3933 static bool
3934 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3935 tree bndrng[2], tree size, const access_data *pad = NULL)
3936 {
3937 if (!bndrng[0] || TREE_NO_WARNING (exp))
3938 return false;
3939
3940 tree maxobjsize = max_object_size ();
3941
3942 bool warned = false;
3943
3944 if (opt == OPT_Wstringop_overread)
3945 {
3946 bool maybe = pad && pad->src.phi ();
3947
3948 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3949 {
3950 if (bndrng[0] == bndrng[1])
3951 warned = (func
3952 ? warning_at (loc, opt,
3953 (maybe
3954 ? G_("%K%qD specified bound %E may "
3955 "exceed maximum object size %E")
3956 : G_("%K%qD specified bound %E "
3957 "exceeds maximum object size %E")),
3958 exp, func, bndrng[0], maxobjsize)
3959 : warning_at (loc, opt,
3960 (maybe
3961 ? G_("%Kspecified bound %E may "
3962 "exceed maximum object size %E")
3963 : G_("%Kspecified bound %E "
3964 "exceeds maximum object size %E")),
3965 exp, bndrng[0], maxobjsize));
3966 else
3967 warned = (func
3968 ? warning_at (loc, opt,
3969 (maybe
3970 ? G_("%K%qD specified bound [%E, %E] may "
3971 "exceed maximum object size %E")
3972 : G_("%K%qD specified bound [%E, %E] "
3973 "exceeds maximum object size %E")),
3974 exp, func,
3975 bndrng[0], bndrng[1], maxobjsize)
3976 : warning_at (loc, opt,
3977 (maybe
3978 ? G_("%Kspecified bound [%E, %E] may "
3979 "exceed maximum object size %E")
3980 : G_("%Kspecified bound [%E, %E] "
3981 "exceeds maximum object size %E")),
3982 exp, bndrng[0], bndrng[1], maxobjsize));
3983 }
3984 else if (!size || tree_int_cst_le (bndrng[0], size))
3985 return false;
3986 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3987 warned = (func
3988 ? warning_at (loc, opt,
3989 (maybe
3990 ? G_("%K%qD specified bound %E may exceed "
3991 "source size %E")
3992 : G_("%K%qD specified bound %E exceeds "
3993 "source size %E")),
3994 exp, func, bndrng[0], size)
3995 : warning_at (loc, opt,
3996 (maybe
3997 ? G_("%Kspecified bound %E may exceed "
3998 "source size %E")
3999 : G_("%Kspecified bound %E exceeds "
4000 "source size %E")),
4001 exp, bndrng[0], size));
4002 else
4003 warned = (func
4004 ? warning_at (loc, opt,
4005 (maybe
4006 ? G_("%K%qD specified bound [%E, %E] may "
4007 "exceed source size %E")
4008 : G_("%K%qD specified bound [%E, %E] exceeds "
4009 "source size %E")),
4010 exp, func, bndrng[0], bndrng[1], size)
4011 : warning_at (loc, opt,
4012 (maybe
4013 ? G_("%Kspecified bound [%E, %E] may exceed "
4014 "source size %E")
4015 : G_("%Kspecified bound [%E, %E] exceeds "
4016 "source size %E")),
4017 exp, bndrng[0], bndrng[1], size));
4018 if (warned)
4019 {
4020 if (pad && pad->src.ref)
4021 {
4022 if (DECL_P (pad->src.ref))
4023 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4024 "source object declared here");
4025 else if (EXPR_HAS_LOCATION (pad->src.ref))
4026 inform (EXPR_LOCATION (pad->src.ref),
4027 "source object allocated here");
4028 }
4029 TREE_NO_WARNING (exp) = true;
4030 }
4031
4032 return warned;
4033 }
4034
4035 bool maybe = pad && pad->dst.phi ();
4036 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4037 {
4038 if (bndrng[0] == bndrng[1])
4039 warned = (func
4040 ? warning_at (loc, opt,
4041 (maybe
4042 ? G_("%K%qD specified size %E may "
4043 "exceed maximum object size %E")
4044 : G_("%K%qD specified size %E "
4045 "exceeds maximum object size %E")),
4046 exp, func, bndrng[0], maxobjsize)
4047 : warning_at (loc, opt,
4048 (maybe
4049 ? G_("%Kspecified size %E may exceed "
4050 "maximum object size %E")
4051 : G_("%Kspecified size %E exceeds "
4052 "maximum object size %E")),
4053 exp, bndrng[0], maxobjsize));
4054 else
4055 warned = (func
4056 ? warning_at (loc, opt,
4057 (maybe
4058 ? G_("%K%qD specified size between %E and %E "
4059 "may exceed maximum object size %E")
4060 : G_("%K%qD specified size between %E and %E "
4061 "exceeds maximum object size %E")),
4062 exp, func,
4063 bndrng[0], bndrng[1], maxobjsize)
4064 : warning_at (loc, opt,
4065 (maybe
4066 ? G_("%Kspecified size between %E and %E "
4067 "may exceed maximum object size %E")
4068 : G_("%Kspecified size between %E and %E "
4069 "exceeds maximum object size %E")),
4070 exp, bndrng[0], bndrng[1], maxobjsize));
4071 }
4072 else if (!size || tree_int_cst_le (bndrng[0], size))
4073 return false;
4074 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4075 warned = (func
4076 ? warning_at (loc, OPT_Wstringop_overflow_,
4077 (maybe
4078 ? G_("%K%qD specified bound %E may exceed "
4079 "destination size %E")
4080 : G_("%K%qD specified bound %E exceeds "
4081 "destination size %E")),
4082 exp, func, bndrng[0], size)
4083 : warning_at (loc, OPT_Wstringop_overflow_,
4084 (maybe
4085 ? G_("%Kspecified bound %E may exceed "
4086 "destination size %E")
4087 : G_("%Kspecified bound %E exceeds "
4088 "destination size %E")),
4089 exp, bndrng[0], size));
4090 else
4091 warned = (func
4092 ? warning_at (loc, OPT_Wstringop_overflow_,
4093 (maybe
4094 ? G_("%K%qD specified bound [%E, %E] may exceed "
4095 "destination size %E")
4096 : G_("%K%qD specified bound [%E, %E] exceeds "
4097 "destination size %E")),
4098 exp, func, bndrng[0], bndrng[1], size)
4099 : warning_at (loc, OPT_Wstringop_overflow_,
4100 (maybe
4101 ? G_("%Kspecified bound [%E, %E] exceeds "
4102 "destination size %E")
4103 : G_("%Kspecified bound [%E, %E] exceeds "
4104 "destination size %E")),
4105 exp, bndrng[0], bndrng[1], size));
4106
4107 if (warned)
4108 {
4109 if (pad && pad->dst.ref)
4110 {
4111 if (DECL_P (pad->dst.ref))
4112 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4113 "destination object declared here");
4114 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4115 inform (EXPR_LOCATION (pad->dst.ref),
4116 "destination object allocated here");
4117 }
4118 TREE_NO_WARNING (exp) = true;
4119 }
4120
4121 return warned;
4122 }
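
/* For example, with -Wstringop-overread enabled a call such as

char a[4];
size_t n = strnlen (a, 8);

may be diagnosed by the function above with a message along the lines
of "specified bound 8 exceeds source size 4". */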
4123
4124 /* For an expression EXP issue an access warning controlled by option OPT
4125 for an access in the RANGE of sizes to a region of SIZE bytes.
4126 WRITE is true for a write access, READ for a read access, and neither
4127 for a call that may or may not perform an access but for which the
4128 range is expected to be valid.
4129 Returns true when a warning has been issued. */
4130
4131 static bool
4132 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4133 tree size, bool write, bool read, bool maybe)
4134 {
4135 bool warned = false;
4136
4137 if (write && read)
4138 {
4139 if (tree_int_cst_equal (range[0], range[1]))
4140 warned = (func
4141 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4142 (maybe
4143 ? G_("%K%qD may access %E byte in a region "
4144 "of size %E")
4145 : G_("%K%qD accessing %E byte in a region "
4146 "of size %E")),
4147 (maybe
4148 ? G_ ("%K%qD may access %E bytes in a region "
4149 "of size %E")
4150 : G_ ("%K%qD accessing %E bytes in a region "
4151 "of size %E")),
4152 exp, func, range[0], size)
4153 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4154 (maybe
4155 ? G_("%Kmay access %E byte in a region "
4156 "of size %E")
4157 : G_("%Kaccessing %E byte in a region "
4158 "of size %E")),
4159 (maybe
4160 ? G_("%Kmay access %E bytes in a region "
4161 "of size %E")
4162 : G_("%Kaccessing %E bytes in a region "
4163 "of size %E")),
4164 exp, range[0], size));
4165 else if (tree_int_cst_sign_bit (range[1]))
4166 {
4167 /* Avoid printing the upper bound if it's invalid. */
4168 warned = (func
4169 ? warning_at (loc, opt,
4170 (maybe
4171 ? G_("%K%qD may access %E or more bytes "
4172 "in a region of size %E")
4173 : G_("%K%qD accessing %E or more bytes "
4174 "in a region of size %E")),
4175 exp, func, range[0], size)
4176 : warning_at (loc, opt,
4177 (maybe
4178 ? G_("%Kmay access %E or more bytes "
4179 "in a region of size %E")
4180 : G_("%Kaccessing %E or more bytes "
4181 "in a region of size %E")),
4182 exp, range[0], size));
4183 }
4184 else
4185 warned = (func
4186 ? warning_at (loc, opt,
4187 (maybe
4188 ? G_("%K%qD may access between %E and %E "
4189 "bytes in a region of size %E")
4190 : G_("%K%qD accessing between %E and %E "
4191 "bytes in a region of size %E")),
4192 exp, func, range[0], range[1],
4193 size)
4194 : warning_at (loc, opt,
4195 (maybe
4196 ? G_("%Kmay access between %E and %E bytes "
4197 "in a region of size %E")
4198 : G_("%Kaccessing between %E and %E bytes "
4199 "in a region of size %E")),
4200 exp, range[0], range[1],
4201 size));
4202 return warned;
4203 }
4204
4205 if (write)
4206 {
4207 if (tree_int_cst_equal (range[0], range[1]))
4208 warned = (func
4209 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4210 (maybe
4211 ? G_("%K%qD may write %E byte into a region "
4212 "of size %E")
4213 : G_("%K%qD writing %E byte into a region "
4214 "of size %E overflows the destination")),
4215 (maybe
4216 ? G_("%K%qD may write %E bytes into a region "
4217 "of size %E")
4218 : G_("%K%qD writing %E bytes into a region "
4219 "of size %E overflows the destination")),
4220 exp, func, range[0], size)
4221 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4222 (maybe
4223 ? G_("%Kmay write %E byte into a region "
4224 "of size %E")
4225 : G_("%Kwriting %E byte into a region "
4226 "of size %E overflows the destination")),
4227 (maybe
4228 ? G_("%Kmay write %E bytes into a region "
4229 "of size %E")
4230 : G_("%Kwriting %E bytes into a region "
4231 "of size %E overflows the destination")),
4232 exp, range[0], size));
4233 else if (tree_int_cst_sign_bit (range[1]))
4234 {
4235 /* Avoid printing the upper bound if it's invalid. */
4236 warned = (func
4237 ? warning_at (loc, opt,
4238 (maybe
4239 ? G_("%K%qD may write %E or more bytes "
4240 "into a region of size %E "
4241 "the destination")
4242 : G_("%K%qD writing %E or more bytes "
4243 "into a region of size %E overflows "
4244 "the destination")),
4245 exp, func, range[0], size)
4246 : warning_at (loc, opt,
4247 (maybe
4248 ? G_("%Kmay write %E or more bytes into "
4249 "a region of size %E")
4250 : G_("%Kwriting %E or more bytes into "
4251 "a region of size %E overflows "
4252 "the destination")),
4253 exp, range[0], size));
4254 }
4255 else
4256 warned = (func
4257 ? warning_at (loc, opt,
4258 (maybe
4259 ? G_("%K%qD may write between %E and %E bytes "
4260 "into a region of size %E")
4261 : G_("%K%qD writing between %E and %E bytes "
4262 "into a region of size %E overflows "
4263 "the destination")),
4264 exp, func, range[0], range[1],
4265 size)
4266 : warning_at (loc, opt,
4267 (maybe
4268 ? G_("%Kmay write between %E and %E bytes "
4269 "into a region of size %E")
4270 : G_("%Kwriting between %E and %E bytes "
4271 "into a region of size %E overflows "
4272 "the destination")),
4273 exp, range[0], range[1],
4274 size));
4275 return warned;
4276 }
4277
4278 if (read)
4279 {
4280 if (tree_int_cst_equal (range[0], range[1]))
4281 warned = (func
4282 ? warning_n (loc, OPT_Wstringop_overread,
4283 tree_to_uhwi (range[0]),
4284 (maybe
4285 ? G_("%K%qD may reade %E byte from a region "
4286 "of size %E")
4287 : G_("%K%qD reading %E byte from a region "
4288 "of size %E")),
4289 (maybe
4290 ? G_("%K%qD may read %E bytes from a region "
4291 "of size %E")
4292 : G_("%K%qD reading %E bytes from a region "
4293 "of size %E")),
4294 exp, func, range[0], size)
4295 : warning_n (loc, OPT_Wstringop_overread,
4296 tree_to_uhwi (range[0]),
4297 (maybe
4298 ? G_("%Kmay read %E byte from a region "
4299 "of size %E")
4300 : G_("%Kreading %E byte from a region "
4301 "of size %E")),
4302 (maybe
4303 ? G_("%Kmay read %E bytes from a region "
4304 "of size %E")
4305 : G_("%Kreading %E bytes from a region "
4306 "of size %E")),
4307 exp, range[0], size));
4308 else if (tree_int_cst_sign_bit (range[1]))
4309 {
4310 /* Avoid printing the upper bound if it's invalid. */
4311 warned = (func
4312 ? warning_at (loc, OPT_Wstringop_overread,
4313 (maybe
4314 ? G_("%K%qD may read %E or more bytes "
4315 "from a region of size %E")
4316 : G_("%K%qD reading %E or more bytes "
4317 "from a region of size %E")),
4318 exp, func, range[0], size)
4319 : warning_at (loc, OPT_Wstringop_overread,
4320 (maybe
4321 ? G_("%Kmay read %E or more bytes "
4322 "from a region of size %E")
4323 : G_("%Kreading %E or more bytes "
4324 "from a region of size %E")),
4325 exp, range[0], size));
4326 }
4327 else
4328 warned = (func
4329 ? warning_at (loc, OPT_Wstringop_overread,
4330 (maybe
4331 ? G_("%K%qD may read between %E and %E bytes "
4332 "from a region of size %E")
4333 : G_("%K%qD reading between %E and %E bytes "
4334 "from a region of size %E")),
4335 exp, func, range[0], range[1], size)
4336 : warning_at (loc, OPT_Wstringop_overread,
4337 (maybe
4338 ? G_("%Kmay read between %E and %E bytes "
4339 "from a region of size %E")
4340 : G_("%Kreading between %E and %E bytes "
4341 "from a region of size %E")),
4342 exp, range[0], range[1], size));
4343
4344 if (warned)
4345 TREE_NO_WARNING (exp) = true;
4346
4347 return warned;
4348 }
4349
4350 if (tree_int_cst_equal (range[0], range[1])
4351 || tree_int_cst_sign_bit (range[1]))
4352 warned = (func
4353 ? warning_n (loc, OPT_Wstringop_overread,
4354 tree_to_uhwi (range[0]),
4355 "%K%qD epecting %E byte in a region of size %E",
4356 "%K%qD expecting %E bytes in a region of size %E",
4357 exp, func, range[0], size)
4358 : warning_n (loc, OPT_Wstringop_overread,
4359 tree_to_uhwi (range[0]),
4360 "%Kexpecting %E byte in a region of size %E",
4361 "%Kexpecting %E bytes in a region of size %E",
4362 exp, range[0], size));
4363 else if (tree_int_cst_sign_bit (range[1]))
4364 {
4365 /* Avoid printing the upper bound if it's invalid. */
4366 warned = (func
4367 ? warning_at (loc, OPT_Wstringop_overread,
4368 "%K%qD expecting %E or more bytes in a region "
4369 "of size %E",
4370 exp, func, range[0], size)
4371 : warning_at (loc, OPT_Wstringop_overread,
4372 "%Kexpecting %E or more bytes in a region "
4373 "of size %E",
4374 exp, range[0], size));
4375 }
4376 else
4377 warned = (func
4378 ? warning_at (loc, OPT_Wstringop_overread,
4379 "%K%qD expecting between %E and %E bytes in "
4380 "a region of size %E",
4381 exp, func, range[0], range[1], size)
4382 : warning_at (loc, OPT_Wstringop_overread,
4383 "%Kexpectting between %E and %E bytes in "
4384 "a region of size %E",
4385 exp, range[0], range[1], size));
4386
4387 if (warned)
4388 TREE_NO_WARNING (exp) = true;
4389
4390 return warned;
4391 }
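
/* For example, a write access such as

char d[3];
memcpy (d, s, 5);

may be diagnosed by the function above with -Wstringop-overflow using the
"writing 5 bytes into a region of size 3 overflows the destination" form
of the messages. */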
4392
4393 /* Issue one inform message describing each target of an access REF.
4394 MODE indicates whether the access is a read, a write, or both. */
4395
4396 void
4397 access_ref::inform_access (access_mode mode) const
4398 {
4399 const access_ref &aref = *this;
4400 if (!aref.ref)
4401 return;
4402
4403 if (aref.phi ())
4404 {
4405 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4406 with data for all objects referenced by the PHI arguments. */
4407 access_ref maxref;
4408 auto_vec<access_ref> all_refs;
4409 if (!get_ref (&all_refs, &maxref))
4410 return;
4411
4412 /* Except for MAXREF, the rest of the arguments' offsets need not
4413 reflect one added to the PHI itself. Determine the latter from
4414 MAXREF on which the result is based. */
4415 const offset_int orng[] =
4416 {
4417 offrng[0] - maxref.offrng[0],
4418 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4419 };
4420
4421 /* Add the final PHI's offset to that of each of the arguments
4422 and recurse to issue an inform message for it. */
4423 for (unsigned i = 0; i != all_refs.length (); ++i)
4424 {
4425 /* Skip any PHIs; those could lead to infinite recursion. */
4426 if (all_refs[i].phi ())
4427 continue;
4428
4429 all_refs[i].add_offset (orng[0], orng[1]);
4430 all_refs[i].inform_access (mode);
4431 }
4432 return;
4433 }
4434
4435 /* Convert offset range and avoid including a zero range since it
4436 isn't necessarily meaningful. */
4437 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4438 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4439 HOST_WIDE_INT minoff;
4440 HOST_WIDE_INT maxoff = diff_max;
4441 if (wi::fits_shwi_p (aref.offrng[0]))
4442 minoff = aref.offrng[0].to_shwi ();
4443 else
4444 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4445
4446 if (wi::fits_shwi_p (aref.offrng[1]))
4447 maxoff = aref.offrng[1].to_shwi ();
4448
4449 if (maxoff <= diff_min || maxoff >= diff_max)
4450 /* Avoid mentioning an upper bound that's equal to or in excess
4451 of the maximum of ptrdiff_t. */
4452 maxoff = minoff;
4453
4454 /* Convert size range and always include it since all sizes are
4455 meaningful. */
4456 unsigned long long minsize = 0, maxsize = 0;
4457 if (wi::fits_shwi_p (aref.sizrng[0])
4458 && wi::fits_shwi_p (aref.sizrng[1]))
4459 {
4460 minsize = aref.sizrng[0].to_shwi ();
4461 maxsize = aref.sizrng[1].to_shwi ();
4462 }
4463
4464 /* SIZRNG doesn't necessarily have the same range as the allocation
4465 size determined by gimple_call_alloc_size (). */
4466 char sizestr[80];
4467 if (minsize == maxsize)
4468 sprintf (sizestr, "%llu", minsize);
4469 else
4470 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4471
4472 char offstr[80];
4473 if (minoff == 0
4474 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4475 offstr[0] = '\0';
4476 else if (minoff == maxoff)
4477 sprintf (offstr, "%lli", (long long) minoff);
4478 else
4479 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4480
4481 location_t loc = UNKNOWN_LOCATION;
4482
4483 tree ref = this->ref;
4484 tree allocfn = NULL_TREE;
4485 if (TREE_CODE (ref) == SSA_NAME)
4486 {
4487 gimple *stmt = SSA_NAME_DEF_STMT (ref);
4488 if (is_gimple_call (stmt))
4489 {
4490 loc = gimple_location (stmt);
4491 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4492 {
4493 /* Strip the SSA_NAME suffix from the variable name and
4494 recreate an identifier with the VLA's original name. */
4495 ref = gimple_call_lhs (stmt);
4496 ref = SSA_NAME_IDENTIFIER (ref);
4497 const char *id = IDENTIFIER_POINTER (ref);
4498 size_t len = strcspn (id, ".$");
4499 if (!len)
4500 len = strlen (id);
4501 ref = get_identifier_with_length (id, len);
4502 }
4503 else
4504 {
4505 /* Except for VLAs, retrieve the allocation function. */
4506 allocfn = gimple_call_fndecl (stmt);
4507 if (!allocfn)
4508 allocfn = gimple_call_fn (stmt);
4509 if (TREE_CODE (allocfn) == SSA_NAME)
4510 {
4511 /* For an ALLOC_CALL via a function pointer make a small
4512 effort to determine the destination of the pointer. */
4513 gimple *def = SSA_NAME_DEF_STMT (allocfn);
4514 if (gimple_assign_single_p (def))
4515 {
4516 tree rhs = gimple_assign_rhs1 (def);
4517 if (DECL_P (rhs))
4518 allocfn = rhs;
4519 else if (TREE_CODE (rhs) == COMPONENT_REF)
4520 allocfn = TREE_OPERAND (rhs, 1);
4521 }
4522 }
4523 }
4524 }
4525 else if (gimple_nop_p (stmt))
4526 /* Handle the PARM_DECL below. */
4527 ref = SSA_NAME_VAR (ref);
4528 }
4529
4530 if (DECL_P (ref))
4531 loc = DECL_SOURCE_LOCATION (ref);
4532 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4533 loc = EXPR_LOCATION (ref);
4534 else if (TREE_CODE (ref) != IDENTIFIER_NODE
4535 && TREE_CODE (ref) != SSA_NAME)
4536 return;
4537
4538 if (mode == access_read_write || mode == access_write_only)
4539 {
4540 if (allocfn == NULL_TREE)
4541 {
4542 if (*offstr)
4543 inform (loc, "at offset %s into destination object %qE of size %s",
4544 offstr, ref, sizestr);
4545 else
4546 inform (loc, "destination object %qE of size %s", ref, sizestr);
4547 return;
4548 }
4549
4550 if (*offstr)
4551 inform (loc,
4552 "at offset %s into destination object of size %s "
4553 "allocated by %qE", offstr, sizestr, allocfn);
4554 else
4555 inform (loc, "destination object of size %s allocated by %qE",
4556 sizestr, allocfn);
4557 return;
4558 }
4559
4560 if (DECL_P (ref))
4561 {
4562 if (*offstr)
4563 inform (loc, "at offset %s into source object %qD of size %s",
4564 offstr, ref, sizestr);
4565 else
4566 inform (loc, "source object %qD of size %s", ref, sizestr);
4567
4568 return;
4569 }
4570
4571 if (*offstr)
4572 inform (loc,
4573 "at offset %s into source object of size %s allocated by %qE",
4574 offstr, sizestr, allocfn);
4575 else
4576 inform (loc, "source object of size %s allocated by %qE",
4577 sizestr, allocfn);
4578 }
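
/* For example, after a -Wstringop-overflow warning for

char buf[4];
memset (buf + 2, 0, 4);

this function may add a note along the lines of
"at offset 2 into destination object 'buf' of size 4". */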
4579
4580 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4581 by BNDRNG if nonnull and valid. */
4582
4583 static void
4584 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4585 {
4586 if (bound)
4587 get_size_range (bound, range);
4588
4589 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4590 return;
4591
4592 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4593 {
4594 offset_int r[] =
4595 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4596 if (r[0] < bndrng[0])
4597 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4598 if (bndrng[1] < r[1])
4599 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4600 }
4601 else
4602 {
4603 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4604 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4605 }
4606 }
4607
4608 /* Try to verify that the sizes and lengths of the arguments to a string
4609 manipulation function given by EXP are within valid bounds and that
4610 the operation does not lead to buffer overflow or read past the end.
4611 Arguments other than EXP may be null. When non-null, the arguments
4612 have the following meaning:
4613 DST is the destination of a copy call or NULL otherwise.
4614 SRC is the source of a copy call or NULL otherwise.
4615 DSTWRITE is the number of bytes written into the destination obtained
4616 from the user-supplied size argument to the function (such as in
4617 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
4618 MAXREAD is the user-supplied bound on the length of the source sequence
4619 (such as in strncat(d, s, N). It specifies the upper limit on the number
4620 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4621 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4622 expression EXP is a string function call (as opposed to a memory call
4623 like memcpy). As an exception, SRCSTR can also be an integer denoting
4624 the precomputed size of the source string or object (for functions like
4625 memcpy).
4626 DSTSIZE is the size of the destination object.
4627
4628 When DSTWRITE is null, MAXREAD is checked to verify that it doesn't
4629 exceed SIZE_MAX.
4630
4631 WRITE is true for write accesses, READ is true for reads. Both are
4632 false for simple size checks in calls to functions that neither read
4633 from nor write to the region.
4634
4635 When nonnull, PAD points to a more detailed description of the access.
4636
4637 If the call is successfully verified as safe return true, otherwise
4638 return false. */
4639
4640 bool
4641 check_access (tree exp, tree dstwrite,
4642 tree maxread, tree srcstr, tree dstsize,
4643 access_mode mode, const access_data *pad /* = NULL */)
4644 {
4645 /* The size of the largest object is half the address space, or
4646 PTRDIFF_MAX. (This is way too permissive.) */
4647 tree maxobjsize = max_object_size ();
4648
4649 /* Either the approximate/minimum length of the source string for
4650 string functions or the size of the source object for raw memory
4651 functions. */
4652 tree slen = NULL_TREE;
4653
4654 /* The range of the access in bytes; first set to the write access
4655 for functions that write and then read for those that also (or
4656 just) read. */
4657 tree range[2] = { NULL_TREE, NULL_TREE };
4658
4659 /* Set to true when the exact number of bytes written by a string
4660 function like strcpy is not known and the only thing that is
4661 known is that it must be at least one (for the terminating nul). */
4662 bool at_least_one = false;
4663 if (srcstr)
4664 {
4665 /* SRCSTR is normally a pointer to string but as a special case
4666 it can be an integer denoting the length of a string. */
4667 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4668 {
4669 if (!check_nul_terminated_array (exp, srcstr, maxread))
4670 return false;
4671 /* Try to determine the range of lengths the source string
4672 refers to. If it can be determined and is less than
4673 the upper bound given by MAXREAD add one to it for
4674 the terminating nul. Otherwise, set it to one for
4675 the same reason, or to MAXREAD as appropriate. */
4676 c_strlen_data lendata = { };
4677 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4678 range[0] = lendata.minlen;
4679 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4680 if (range[0]
4681 && TREE_CODE (range[0]) == INTEGER_CST
4682 && TREE_CODE (range[1]) == INTEGER_CST
4683 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4684 {
4685 if (maxread && tree_int_cst_le (maxread, range[0]))
4686 range[0] = range[1] = maxread;
4687 else
4688 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4689 range[0], size_one_node);
4690
4691 if (maxread && tree_int_cst_le (maxread, range[1]))
4692 range[1] = maxread;
4693 else if (!integer_all_onesp (range[1]))
4694 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4695 range[1], size_one_node);
4696
4697 slen = range[0];
4698 }
4699 else
4700 {
4701 at_least_one = true;
4702 slen = size_one_node;
4703 }
4704 }
4705 else
4706 slen = srcstr;
4707 }
4708
4709 if (!dstwrite && !maxread)
4710 {
4711 /* When the only available piece of data is the object size
4712 there is nothing to do. */
4713 if (!slen)
4714 return true;
4715
4716 /* Otherwise, when the length of the source sequence is known
4717 (as with strlen), set DSTWRITE to it. */
4718 if (!range[0])
4719 dstwrite = slen;
4720 }
4721
4722 if (!dstsize)
4723 dstsize = maxobjsize;
4724
4725 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4726 if valid. */
4727 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4728
4729 tree func = get_callee_fndecl (exp);
4730 /* Read vs write access by built-ins can be determined from the const
4731 qualifiers on the pointer argument. In the absence of attribute
4732 access, non-const qualified pointer arguments to user-defined
4733 functions are assumed to both read and write the objects. */
4734 const bool builtin = func ? fndecl_built_in_p (func) : false;
4735
4736 /* First check the number of bytes to be written against the maximum
4737 object size. */
4738 if (range[0]
4739 && TREE_CODE (range[0]) == INTEGER_CST
4740 && tree_int_cst_lt (maxobjsize, range[0]))
4741 {
4742 location_t loc = tree_inlined_location (exp);
4743 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4744 NULL_TREE, pad);
4745 return false;
4746 }
4747
4748 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4749 constant, and in range of unsigned HOST_WIDE_INT. */
4750 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4751
4752 /* Next check the number of bytes to be written against the destination
4753 object size. */
4754 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4755 {
4756 if (range[0]
4757 && TREE_CODE (range[0]) == INTEGER_CST
4758 && ((tree_fits_uhwi_p (dstsize)
4759 && tree_int_cst_lt (dstsize, range[0]))
4760 || (dstwrite
4761 && tree_fits_uhwi_p (dstwrite)
4762 && tree_int_cst_lt (dstwrite, range[0]))))
4763 {
4764 if (TREE_NO_WARNING (exp)
4765 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4766 return false;
4767
4768 location_t loc = tree_inlined_location (exp);
4769 bool warned = false;
4770 if (dstwrite == slen && at_least_one)
4771 {
4772 /* This is a call to strcpy with a destination of 0 size
4773 and a source of unknown length. The call will write
4774 at least one byte past the end of the destination. */
4775 warned = (func
4776 ? warning_at (loc, OPT_Wstringop_overflow_,
4777 "%K%qD writing %E or more bytes into "
4778 "a region of size %E overflows "
4779 "the destination",
4780 exp, func, range[0], dstsize)
4781 : warning_at (loc, OPT_Wstringop_overflow_,
4782 "%Kwriting %E or more bytes into "
4783 "a region of size %E overflows "
4784 "the destination",
4785 exp, range[0], dstsize));
4786 }
4787 else
4788 {
4789 const bool read
4790 = mode == access_read_only || mode == access_read_write;
4791 const bool write
4792 = mode == access_write_only || mode == access_read_write;
4793 const bool maybe = pad && pad->dst.parmarray;
4794 warned = warn_for_access (loc, func, exp,
4795 OPT_Wstringop_overflow_,
4796 range, dstsize,
4797 write, read && !builtin, maybe);
4798 }
4799
4800 if (warned)
4801 {
4802 TREE_NO_WARNING (exp) = true;
4803 if (pad)
4804 pad->dst.inform_access (pad->mode);
4805 }
4806
4807 /* Return error when an overflow has been detected. */
4808 return false;
4809 }
4810 }
4811
4812 /* Check the maximum length of the source sequence against the size
4813 of the destination object if known, or against the maximum size
4814 of an object. */
4815 if (maxread)
4816 {
4817 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4818 PAD is nonnull and BNDRNG is valid. */
4819 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4820
4821 location_t loc = tree_inlined_location (exp);
4822 tree size = dstsize;
4823 if (pad && pad->mode == access_read_only)
4824 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4825
4826 if (range[0] && maxread && tree_fits_uhwi_p (size))
4827 {
4828 if (tree_int_cst_lt (maxobjsize, range[0]))
4829 {
4830 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4831 range, size, pad);
4832 return false;
4833 }
4834
4835 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4836 {
4837 int opt = (dstwrite || mode != access_read_only
4838 ? OPT_Wstringop_overflow_
4839 : OPT_Wstringop_overread);
4840 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4841 return false;
4842 }
4843 }
4844
4845 maybe_warn_nonstring_arg (func, exp);
4846 }
4847
4848 /* Check for reading past the end of SRC. */
4849 bool overread = (slen
4850 && slen == srcstr
4851 && dstwrite
4852 && range[0]
4853 && TREE_CODE (slen) == INTEGER_CST
4854 && tree_int_cst_lt (slen, range[0]));
4855 /* If none is determined try to get a better answer based on the details
4856 in PAD. */
4857 if (!overread
4858 && pad
4859 && pad->src.sizrng[1] >= 0
4860 && pad->src.offrng[0] >= 0
4861 && (pad->src.offrng[1] < 0
4862 || pad->src.offrng[0] <= pad->src.offrng[1]))
4863 {
4864 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4865 PAD is nonnull and BNDRNG is valid. */
4866 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4867 /* Set OVERREAD for reads starting just past the end of an object. */
4868 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4869 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4870 slen = size_zero_node;
4871 }
4872
4873 if (overread)
4874 {
4875 if (TREE_NO_WARNING (exp)
4876 || (srcstr && TREE_NO_WARNING (srcstr))
4877 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4878 return false;
4879
4880 location_t loc = tree_inlined_location (exp);
4881 const bool read
4882 = mode == access_read_only || mode == access_read_write;
4883 const bool maybe = pad && pad->dst.parmarray;
4884 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4885 slen, false, read, maybe))
4886 {
4887 TREE_NO_WARNING (exp) = true;
4888 if (pad)
4889 pad->src.inform_access (access_read_only);
4890 }
4891 return false;
4892 }
4893
4894 return true;
4895 }
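
/* A rough end-to-end example of the checks above: with

char d[4];
strcpy (d, "overflow");

the length of the source (eight characters plus the terminating nul)
exceeds the size of the destination, so the call may be diagnosed with
-Wstringop-overflow and false returned to the caller. */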
4896
4897 /* A convenience wrapper for check_access above to check access
4898 by a read-only function like puts. */
4899
4900 static bool
4901 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4902 int ost /* = 1 */)
4903 {
4904 if (!warn_stringop_overread)
4905 return true;
4906
4907 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4908 compute_objsize (src, ost, &data.src);
4909 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4910 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4911 &data);
4912 }
4913
4914 /* If STMT is a call to an allocation function, returns the constant
4915 maximum size of the object allocated by the call represented as
4916 sizetype. If nonnull, sets RNG1[] to the range of the size.
4917 When nonnull, uses RVALS for range information, otherwise calls
4918 get_range_info to get it.
4919 Returns null when STMT is not a call to a valid allocation function. */
4920
4921 tree
4922 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4923 range_query * /* = NULL */)
4924 {
4925 if (!stmt)
4926 return NULL_TREE;
4927
4928 tree allocfntype;
4929 if (tree fndecl = gimple_call_fndecl (stmt))
4930 allocfntype = TREE_TYPE (fndecl);
4931 else
4932 allocfntype = gimple_call_fntype (stmt);
4933
4934 if (!allocfntype)
4935 return NULL_TREE;
4936
4937 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4938 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4939 if (!at)
4940 {
4941 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4942 return NULL_TREE;
4943
4944 argidx1 = 0;
4945 }
4946
4947 unsigned nargs = gimple_call_num_args (stmt);
4948
4949 if (argidx1 == UINT_MAX)
4950 {
4951 tree atval = TREE_VALUE (at);
4952 if (!atval)
4953 return NULL_TREE;
4954
4955 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4956 if (nargs <= argidx1)
4957 return NULL_TREE;
4958
4959 atval = TREE_CHAIN (atval);
4960 if (atval)
4961 {
4962 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4963 if (nargs <= argidx2)
4964 return NULL_TREE;
4965 }
4966 }
4967
4968 tree size = gimple_call_arg (stmt, argidx1);
4969
4970 wide_int rng1_buf[2];
4971 /* If RNG1 is not set, use the buffer. */
4972 if (!rng1)
4973 rng1 = rng1_buf;
4974
4975 /* Use maximum precision to avoid overflow below. */
4976 const int prec = ADDR_MAX_PRECISION;
4977
4978 {
4979 tree r[2];
4980 /* Determine the largest valid range size, including zero. */
4981 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4982 return NULL_TREE;
4983 rng1[0] = wi::to_wide (r[0], prec);
4984 rng1[1] = wi::to_wide (r[1], prec);
4985 }
4986
4987 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4988 return fold_convert (sizetype, size);
4989
4990 /* To handle ranges do the math in wide_int and return the product
4991 of the upper bounds as a constant. Ignore anti-ranges. */
4992 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4993 wide_int rng2[2];
4994 {
4995 tree r[2];
4996 /* As above, use the full non-negative range on failure. */
4997 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4998 return NULL_TREE;
4999 rng2[0] = wi::to_wide (r[0], prec);
5000 rng2[1] = wi::to_wide (r[1], prec);
5001 }
5002
5003 /* Compute products of both bounds for the caller but return the lesser
5004 of SIZE_MAX and the product of the upper bounds as a constant. */
5005 rng1[0] = rng1[0] * rng2[0];
5006 rng1[1] = rng1[1] * rng2[1];
5007
5008 const tree size_max = TYPE_MAX_VALUE (sizetype);
5009 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5010 {
5011 rng1[1] = wi::to_wide (size_max, prec);
5012 return size_max;
5013 }
5014
5015 return wide_int_to_tree (sizetype, rng1[1]);
5016 }
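
/* For example, for

void *p = malloc (n);
void *q = calloc (n, 8);

the attribute alloc_size on the declarations identifies the size
arguments examined above: the result for the first call is bound by
the range of N, and for the second by the product of the ranges of
both arguments. */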
5017
5018 /* For an access to an object referenced by the function parameter PTR
5019 of pointer type, set RNG[] to the range of sizes of the object
5020 obtained from the attribute access specification for the current function.
5021 Set STATIC_ARRAY if the array parameter has been declared [static].
5022 Return the function parameter on success and null otherwise. */
5023
5024 tree
5025 gimple_parm_array_size (tree ptr, wide_int rng[2],
5026 bool *static_array /* = NULL */)
5027 {
5028 /* For a function argument try to determine the byte size of the array
5029 from the current function declaration (e.g., attribute access or
5030 related). */
5031 tree var = SSA_NAME_VAR (ptr);
5032 if (TREE_CODE (var) != PARM_DECL)
5033 return NULL_TREE;
5034
5035 const unsigned prec = TYPE_PRECISION (sizetype);
5036
5037 rdwr_map rdwr_idx;
5038 attr_access *access = get_parm_access (rdwr_idx, var);
5039 if (!access)
5040 return NULL_TREE;
5041
5042 if (access->sizarg != UINT_MAX)
5043 {
5044 /* TODO: Try to extract the range from the argument based on
5045 those of subsequent assertions or based on known calls to
5046 the current function. */
5047 return NULL_TREE;
5048 }
5049
5050 if (!access->minsize)
5051 return NULL_TREE;
5052
5053 /* Only consider ordinary array bound at level 2 (or above if it's
5054 ever added). */
5055 if (warn_array_parameter < 2 && !access->static_p)
5056 return NULL_TREE;
5057
5058 if (static_array)
5059 *static_array = access->static_p;
5060
5061 rng[0] = wi::zero (prec);
5062 rng[1] = wi::uhwi (access->minsize, prec);
5063 /* Multiply the array bound encoded in the attribute by the size
5064 of what the pointer argument to which it decays points to. */
5065 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5066 tree size = TYPE_SIZE_UNIT (eltype);
5067 if (!size || TREE_CODE (size) != INTEGER_CST)
5068 return NULL_TREE;
5069
5070 rng[1] *= wi::to_wide (size, prec);
5071 return var;
5072 }
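
/* For example, for the parameter of

void f (int a[static 8]);

the function above would return A with RNG set roughly to
[0, 8 * sizeof (int)] and *STATIC_ARRAY set to true. */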
5073
5074 /* Wrapper around the wide_int overload of get_range that accepts
5075 offset_int instead. For middle-end expressions it returns the same
5076 result. For a subset of nonconstant expressions emitted by the front
5077 end it determines a more precise range than would be possible otherwise. */
5078
5079 static bool
5080 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5081 {
5082 offset_int add = 0;
5083 if (TREE_CODE (x) == PLUS_EXPR)
5084 {
5085 /* Handle constant offsets in pointer addition expressions seen
5086 in the front end IL. */
5087 tree op = TREE_OPERAND (x, 1);
5088 if (TREE_CODE (op) == INTEGER_CST)
5089 {
5090 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5091 add = wi::to_offset (op);
5092 x = TREE_OPERAND (x, 0);
5093 }
5094 }
5095
5096 if (TREE_CODE (x) == NOP_EXPR)
5097 /* Also handle conversions to sizetype seen in the front end IL. */
5098 x = TREE_OPERAND (x, 0);
5099
5100 tree type = TREE_TYPE (x);
5101 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5102 return false;
5103
5104 if (TREE_CODE (x) != INTEGER_CST
5105 && TREE_CODE (x) != SSA_NAME)
5106 {
5107 if (TYPE_UNSIGNED (type)
5108 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5109 type = signed_type_for (type);
5110
5111 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5112 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5113 return x;
5114 }
5115
5116 wide_int wr[2];
5117 if (!get_range (x, stmt, wr, rvals))
5118 return false;
5119
5120 signop sgn = SIGNED;
5121 /* Only convert signed integers or unsigned sizetype to a signed
5122 offset and avoid converting large positive values in narrower
5123 types to negative offsets. */
5124 if (TYPE_UNSIGNED (type)
5125 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5126 sgn = UNSIGNED;
5127
5128 r[0] = offset_int::from (wr[0], sgn);
5129 r[1] = offset_int::from (wr[1], sgn);
5130 return true;
5131 }
5132
5133 /* Return the argument that the call STMT to a built-in function returns
5134 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5135 from the argument reflected in the value returned by the built-in if it
5136 can be determined, otherwise to 0 and HWI_M1U respectively. */
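/* For example, for  p = mempcpy (d, s, 32)  the function returns D with
   OFFRNG[] set to [32, 32], for  p = memchr (s, c, 32)  it returns S
   with OFFRNG[] set to [0, 32], and for  p = strchr (s, c)  it returns
   S with OFFRNG[] set to [0, HWI_M1U].  */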
5137
5138 static tree
5139 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5140 range_query *rvals)
5141 {
5142 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5143 || gimple_call_num_args (stmt) < 1)
5144 return NULL_TREE;
5145
5146 tree fn = gimple_call_fndecl (stmt);
5147 switch (DECL_FUNCTION_CODE (fn))
5148 {
5149 case BUILT_IN_MEMCPY:
5150 case BUILT_IN_MEMCPY_CHK:
5151 case BUILT_IN_MEMMOVE:
5152 case BUILT_IN_MEMMOVE_CHK:
5153 case BUILT_IN_MEMSET:
5154 case BUILT_IN_STPCPY:
5155 case BUILT_IN_STPCPY_CHK:
5156 case BUILT_IN_STPNCPY:
5157 case BUILT_IN_STPNCPY_CHK:
5158 case BUILT_IN_STRCAT:
5159 case BUILT_IN_STRCAT_CHK:
5160 case BUILT_IN_STRCPY:
5161 case BUILT_IN_STRCPY_CHK:
5162 case BUILT_IN_STRNCAT:
5163 case BUILT_IN_STRNCAT_CHK:
5164 case BUILT_IN_STRNCPY:
5165 case BUILT_IN_STRNCPY_CHK:
5166 offrng[0] = offrng[1] = 0;
5167 return gimple_call_arg (stmt, 0);
5168
5169 case BUILT_IN_MEMPCPY:
5170 case BUILT_IN_MEMPCPY_CHK:
5171 {
5172 tree off = gimple_call_arg (stmt, 2);
5173 if (!get_offset_range (off, stmt, offrng, rvals))
5174 {
5175 offrng[0] = 0;
5176 offrng[1] = HOST_WIDE_INT_M1U;
5177 }
5178 return gimple_call_arg (stmt, 0);
5179 }
5180
5181 case BUILT_IN_MEMCHR:
5182 {
5183 tree off = gimple_call_arg (stmt, 2);
5184 if (get_offset_range (off, stmt, offrng, rvals))
5185 offrng[0] = 0;
5186 else
5187 {
5188 offrng[0] = 0;
5189 offrng[1] = HOST_WIDE_INT_M1U;
5190 }
5191 return gimple_call_arg (stmt, 0);
5192 }
5193
5194 case BUILT_IN_STRCHR:
5195 case BUILT_IN_STRRCHR:
5196 case BUILT_IN_STRSTR:
5197 {
5198 offrng[0] = 0;
5199 offrng[1] = HOST_WIDE_INT_M1U;
5200 }
5201 return gimple_call_arg (stmt, 0);
5202
5203 default:
5204 break;
5205 }
5206
5207 return NULL_TREE;
5208 }
5209
5210 /* A helper of compute_objsize() to determine the size from an assignment
5211 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5212
5213 static bool
5214 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5215 ssa_name_limit_t &snlim, pointer_query *qry)
5216 {
5217 tree_code code = gimple_assign_rhs_code (stmt);
5218
5219 tree ptr = gimple_assign_rhs1 (stmt);
5220
5221 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5222 Determine the size/offset of each and use the one with more or less
5223 space remaining, respectively. If either fails, use the information
5224 determined from the other instead, adjusted up or down as appropriate
5225 for the expression. */
5226 access_ref aref[2] = { *pref, *pref };
5227 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5228 {
5229 aref[0].base0 = false;
5230 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5231 aref[0].add_max_offset ();
5232 aref[0].set_max_size_range ();
5233 }
5234
5235 ptr = gimple_assign_rhs2 (stmt);
5236 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5237 {
5238 aref[1].base0 = false;
5239 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5240 aref[1].add_max_offset ();
5241 aref[1].set_max_size_range ();
5242 }
5243
5244 if (!aref[0].ref && !aref[1].ref)
5245 /* Fail if the identity of neither argument could be determined. */
5246 return false;
5247
5248 bool i0 = false;
5249 if (aref[0].ref && aref[0].base0)
5250 {
5251 if (aref[1].ref && aref[1].base0)
5252 {
5253 /* If the object referenced by both arguments has been determined
5254 set *PREF to the one with more or less space remaining, whichever
5255 is appropriate for CODE.
5256 TODO: Indicate when the objects are distinct so it can be
5257 diagnosed. */
5258 i0 = code == MAX_EXPR;
5259 const bool i1 = !i0;
5260
5261 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5262 *pref = aref[i1];
5263 else
5264 *pref = aref[i0];
5265 return true;
5266 }
5267
5268 /* If only the object referenced by one of the arguments could be
5269 determined, use it and... */
5270 *pref = aref[0];
5271 i0 = true;
5272 }
5273 else
5274 *pref = aref[1];
5275
5276 const bool i1 = !i0;
5277 /* ...see if the offset obtained from the other pointer can be used
5278 to tighten up the bound on the offset obtained from the first. */
5279 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5280 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5281 {
5282 pref->offrng[0] = aref[i0].offrng[0];
5283 pref->offrng[1] = aref[i0].offrng[1];
5284 }
5285 return true;
5286 }
5287
5288 /* Helper to compute the size of the object referenced by the PTR
5289 expression which must have pointer type, using Object Size type
5290 OSTYPE (only the least significant 2 bits are used).
5291 On success, sets PREF->REF to the DECL of the referenced object
5292 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5293 offsets into it, and PREF->SIZRNG to the range of sizes of
5294 the object(s).
5295 SNLIM is used to avoid visiting the same PHI operand multiple
5296 times, and, when nonnull, RVALS to determine range information.
5297 Returns true on success, false when a meaningful size (or range)
5298 cannot be determined.
5299
5300 The function is intended for diagnostics and should not be used
5301 to influence code generation or optimization. */
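/* For example, for
     char a[7];
   and PTR equal to  &a[2],  the function sets PREF->REF to A,
   PREF->SIZRNG to [7, 7], and PREF->OFFRNG to [2, 2].  */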
5302
5303 static bool
5304 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5305 ssa_name_limit_t &snlim, pointer_query *qry)
5306 {
5307 STRIP_NOPS (ptr);
5308
5309 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5310 if (addr)
5311 {
5312 --pref->deref;
5313 ptr = TREE_OPERAND (ptr, 0);
5314 }
5315
5316 if (DECL_P (ptr))
5317 {
5318 pref->ref = ptr;
5319
5320 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5321 {
5322 /* Set the maximum size if the reference is to the pointer
5323 itself (as opposed to what it points to). */
5324 pref->set_max_size_range ();
5325 return true;
5326 }
5327
5328 if (tree size = decl_init_size (ptr, false))
5329 if (TREE_CODE (size) == INTEGER_CST)
5330 {
5331 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5332 return true;
5333 }
5334
5335 pref->set_max_size_range ();
5336 return true;
5337 }
5338
5339 const tree_code code = TREE_CODE (ptr);
5340 range_query *const rvals = qry ? qry->rvals : NULL;
5341
5342 if (code == BIT_FIELD_REF)
5343 {
5344 tree ref = TREE_OPERAND (ptr, 0);
5345 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5346 return false;
5347
5348 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5349 pref->add_offset (off / BITS_PER_UNIT);
5350 return true;
5351 }
5352
5353 if (code == COMPONENT_REF)
5354 {
5355 tree ref = TREE_OPERAND (ptr, 0);
5356 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5357 /* In accesses through union types consider the entire unions
5358 rather than just their members. */
5359 ostype = 0;
5360 tree field = TREE_OPERAND (ptr, 1);
5361
5362 if (ostype == 0)
5363 {
5364 /* In OSTYPE zero (for raw memory functions like memcpy), use
5365 the maximum size instead if the identity of the enclosing
5366 object cannot be determined. */
5367 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5368 return false;
5369
5370 /* Otherwise, use the size of the enclosing object and add
5371 the offset of the member to the offset computed so far. */
5372 tree offset = byte_position (field);
5373 if (TREE_CODE (offset) == INTEGER_CST)
5374 pref->add_offset (wi::to_offset (offset));
5375 else
5376 pref->add_max_offset ();
5377
5378 if (!pref->ref)
5379 /* REF may have been already set to an SSA_NAME earlier
5380 to provide better context for diagnostics. In that case,
5381 leave it unchanged. */
5382 pref->ref = ref;
5383 return true;
5384 }
5385
5386 pref->ref = field;
5387
5388 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5389 {
5390 /* Set maximum size if the reference is to the pointer member
5391 itself (as opposed to what it points to). */
5392 pref->set_max_size_range ();
5393 return true;
5394 }
5395
5396 /* SAM is set for array members that might need special treatment. */
5397 special_array_member sam;
5398 tree size = component_ref_size (ptr, &sam);
5399 if (sam == special_array_member::int_0)
5400 pref->sizrng[0] = pref->sizrng[1] = 0;
5401 else if (!pref->trail1special && sam == special_array_member::trail_1)
5402 pref->sizrng[0] = pref->sizrng[1] = 1;
5403 else if (size && TREE_CODE (size) == INTEGER_CST)
5404 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5405 else
5406 {
5407 /* When the size of the member is unknown it's either a flexible
5408 array member or a trailing special array member (either zero
5409 length or one-element). Set the size to the maximum minus
5410 the constant size of the type. */
5411 pref->sizrng[0] = 0;
5412 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5413 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5414 if (TREE_CODE (recsize) == INTEGER_CST)
5415 pref->sizrng[1] -= wi::to_offset (recsize);
5416 }
5417 return true;
5418 }
5419
5420 if (code == ARRAY_REF || code == MEM_REF)
5421 {
5422 ++pref->deref;
5423
5424 tree ref = TREE_OPERAND (ptr, 0);
5425 tree reftype = TREE_TYPE (ref);
5426 if (!addr && code == ARRAY_REF
5427 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5428 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
5429 of known bound. */
5430 return false;
5431
5432 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
5433 {
5434 /* Give up for MEM_REFs of vector types; those may be synthesized
5435 from multiple assignments to consecutive data members. See PR
5436 93200.
5437 FIXME: Deal with this more generally, e.g., by marking up such
5438 MEM_REFs at the time they're created. */
5439 reftype = TREE_TYPE (reftype);
5440 if (TREE_CODE (reftype) == VECTOR_TYPE)
5441 return false;
5442 }
5443
5444 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5445 return false;
5446
5447 offset_int orng[2];
5448 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5449 if (!get_offset_range (off, NULL, orng, rvals))
5450 {
5451 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5452 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5453 orng[0] = -orng[1] - 1;
5454 }
5455
5456 if (TREE_CODE (ptr) == ARRAY_REF)
5457 {
5458 /* Convert the array index range determined above to a byte
5459 offset. */
5460 tree lowbnd = array_ref_low_bound (ptr);
5461 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5462 {
5463 /* Adjust the index by the low bound of the array domain
5464 (normally zero but 1 in Fortran). */
5465 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5466 orng[0] -= lb;
5467 orng[1] -= lb;
5468 }
5469
5470 tree eltype = TREE_TYPE (ptr);
5471 tree tpsize = TYPE_SIZE_UNIT (eltype);
5472 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5473 {
5474 pref->add_max_offset ();
5475 return true;
5476 }
5477
5478 offset_int sz = wi::to_offset (tpsize);
5479 orng[0] *= sz;
5480 orng[1] *= sz;
5481
5482 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5483 {
5484 /* Except for the permissive raw memory functions which use
5485 the size of the whole object determined above, use the size
5486 of the referenced array. Because the overall offset is from
5487 the beginning of the complete array object, add this overall
5488 offset to the size of the array. */
5489 offset_int sizrng[2] =
5490 {
5491 pref->offrng[0] + orng[0] + sz,
5492 pref->offrng[1] + orng[1] + sz
5493 };
5494 if (sizrng[1] < sizrng[0])
5495 std::swap (sizrng[0], sizrng[1]);
5496 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5497 pref->sizrng[0] = sizrng[0];
5498 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5499 pref->sizrng[1] = sizrng[1];
5500 }
5501 }
5502
5503 pref->add_offset (orng[0], orng[1]);
5504 return true;
5505 }
5506
5507 if (code == TARGET_MEM_REF)
5508 {
5509 tree ref = TREE_OPERAND (ptr, 0);
5510 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5511 return false;
5512
5513 /* TODO: Handle remaining operands. Until then, add maximum offset. */
5514 pref->ref = ptr;
5515 pref->add_max_offset ();
5516 return true;
5517 }
5518
5519 if (code == INTEGER_CST)
5520 {
5521 /* Pointer constants other than null are most likely the result
5522 of erroneous null pointer addition/subtraction. Set size to
5523 zero. For null pointers, set size to the maximum for now
5524 since those may be the result of jump threading. */
5525 if (integer_zerop (ptr))
5526 pref->set_max_size_range ();
5527 else
5528 pref->sizrng[0] = pref->sizrng[1] = 0;
5529 pref->ref = ptr;
5530
5531 return true;
5532 }
5533
5534 if (code == STRING_CST)
5535 {
5536 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5537 pref->ref = ptr;
5538 return true;
5539 }
5540
5541 if (code == POINTER_PLUS_EXPR)
5542 {
5543 tree ref = TREE_OPERAND (ptr, 0);
5544 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5545 return false;
5546
5547 /* Clear DEREF since the offset is being applied to the target
5548 of the dereference. */
5549 pref->deref = 0;
5550
5551 offset_int orng[2];
5552 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5553 if (get_offset_range (off, NULL, orng, rvals))
5554 pref->add_offset (orng[0], orng[1]);
5555 else
5556 pref->add_max_offset ();
5557 return true;
5558 }
5559
5560 if (code == VIEW_CONVERT_EXPR)
5561 {
5562 ptr = TREE_OPERAND (ptr, 0);
5563 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5564 }
5565
5566 if (code == SSA_NAME)
5567 {
5568 if (!snlim.next ())
5569 return false;
5570
5571 /* Only process an SSA_NAME if the recursion limit has not yet
5572 been reached. */
5573 if (qry)
5574 {
5575 if (++qry->depth)
5576 qry->max_depth = qry->depth;
5577 if (const access_ref *cache_ref = qry->get_ref (ptr))
5578 {
5579 /* If the pointer is in the cache set *PREF to what it refers
5580 to and return success. */
5581 *pref = *cache_ref;
5582 return true;
5583 }
5584 }
5585
5586 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5587 if (is_gimple_call (stmt))
5588 {
5589 /* If STMT is a call to an allocation function get the size
5590 from its argument(s). If successful, also set *PREF->REF
5591 to PTR for the caller to include in diagnostics. */
5592 wide_int wr[2];
5593 if (gimple_call_alloc_size (stmt, wr, rvals))
5594 {
5595 pref->ref = ptr;
5596 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5597 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5598 /* Constrain both bounds to a valid size. */
5599 offset_int maxsize = wi::to_offset (max_object_size ());
5600 if (pref->sizrng[0] > maxsize)
5601 pref->sizrng[0] = maxsize;
5602 if (pref->sizrng[1] > maxsize)
5603 pref->sizrng[1] = maxsize;
5604 }
5605 else
5606 {
5607 /* For functions known to return one of their pointer arguments
5608 try to determine what the returned pointer points to, and on
5609 success add OFFRNG which was set to the offset added by
5610 the function (e.g., memchr) to the overall offset. */
5611 offset_int offrng[2];
5612 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5613 {
5614 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5615 return false;
5616
5617 /* Cap OFFRNG[1] to at most the remaining size of
5618 the object. */
5619 offset_int remrng[2];
5620 remrng[1] = pref->size_remaining (remrng);
5621 if (remrng[1] < offrng[1])
5622 offrng[1] = remrng[1];
5623 pref->add_offset (offrng[0], offrng[1]);
5624 }
5625 else
5626 {
5627 /* For other calls that might return arbitrary pointers
5628 including into the middle of objects set the size
5629 range to maximum, clear PREF->BASE0, and also set
5630 PREF->REF to include in diagnostics. */
5631 pref->set_max_size_range ();
5632 pref->base0 = false;
5633 pref->ref = ptr;
5634 }
5635 }
5636 qry->put_ref (ptr, *pref);
5637 return true;
5638 }
5639
5640 if (gimple_nop_p (stmt))
5641 {
5642 /* For a function argument try to determine the byte size
5643 of the array from the current function declaration
5644 (e.g., attribute access or related). */
5645 wide_int wr[2];
5646 bool static_array = false;
5647 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5648 {
5649 pref->parmarray = !static_array;
5650 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5651 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5652 pref->ref = ref;
5653 qry->put_ref (ptr, *pref);
5654 return true;
5655 }
5656
5657 pref->set_max_size_range ();
5658 pref->base0 = false;
5659 pref->ref = ptr;
5660 qry->put_ref (ptr, *pref);
5661 return true;
5662 }
5663
5664 if (gimple_code (stmt) == GIMPLE_PHI)
5665 {
5666 pref->ref = ptr;
5667 access_ref phi_ref = *pref;
5668 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5669 return false;
5670 *pref = phi_ref;
5671 pref->ref = ptr;
5672 qry->put_ref (ptr, *pref);
5673 return true;
5674 }
5675
5676 if (!is_gimple_assign (stmt))
5677 {
5678 /* Clear BASE0 since the assigned pointer might point into
5679 the middle of the object, set the maximum size range and,
5680 if the SSA_NAME refers to a function argument, set
5681 PREF->REF to it. */
5682 pref->base0 = false;
5683 pref->set_max_size_range ();
5684 pref->ref = ptr;
5685 return true;
5686 }
5687
5688 tree_code code = gimple_assign_rhs_code (stmt);
5689
5690 if (code == MAX_EXPR || code == MIN_EXPR)
5691 {
5692 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5693 return false;
5694 qry->put_ref (ptr, *pref);
5695 return true;
5696 }
5697
5698 tree rhs = gimple_assign_rhs1 (stmt);
5699
5700 if (code == POINTER_PLUS_EXPR
5701 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5702 {
5703 /* Compute the size of the object first. */
5704 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5705 return false;
5706
5707 offset_int orng[2];
5708 tree off = gimple_assign_rhs2 (stmt);
5709 if (get_offset_range (off, stmt, orng, rvals))
5710 pref->add_offset (orng[0], orng[1]);
5711 else
5712 pref->add_max_offset ();
5713 qry->put_ref (ptr, *pref);
5714 return true;
5715 }
5716
5717 if (code == ADDR_EXPR
5718 || code == SSA_NAME)
5719 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5720
5721 /* (This could also be an assignment from a nonlocal pointer.) Save
5722 PTR to mention in diagnostics but otherwise treat it as a pointer
5723 to an unknown object. */
5724 pref->ref = rhs;
5725 pref->base0 = false;
5726 pref->set_max_size_range ();
5727 return true;
5728 }
5729
5730 /* Assume all other expressions point into an unknown object
5731 of the maximum valid size. */
5732 pref->ref = ptr;
5733 pref->base0 = false;
5734 pref->set_max_size_range ();
5735 if (TREE_CODE (ptr) == SSA_NAME)
5736 qry->put_ref (ptr, *pref);
5737 return true;
5738 }
5739
5740 /* A "public" wrapper around the above. Clients should use this overload
5741 instead. */
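/* For example, for the result P of the call
     void *p = __builtin_malloc (32);
   compute_objsize (p, 0, &ref) returns the constant 32, with REF.REF
   set to the SSA_NAME P and REF.SIZRNG set to [32, 32].  */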
5742
5743 tree
5744 compute_objsize (tree ptr, int ostype, access_ref *pref,
5745 range_query *rvals /* = NULL */)
5746 {
5747 pointer_query qry;
5748 qry.rvals = rvals;
5749 ssa_name_limit_t snlim;
5750 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5751 return NULL_TREE;
5752
5753 offset_int maxsize = pref->size_remaining ();
5754 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5755 pref->offrng[0] = 0;
5756 return wide_int_to_tree (sizetype, maxsize);
5757 }
5758
5759 /* Transitional wrapper. The function should be removed once callers
5760 transition to the pointer_query API. */
5761
5762 tree
5763 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5764 {
5765 pointer_query qry;
5766 if (ptr_qry)
5767 ptr_qry->depth = 0;
5768 else
5769 ptr_qry = &qry;
5770
5771 ssa_name_limit_t snlim;
5772 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5773 return NULL_TREE;
5774
5775 offset_int maxsize = pref->size_remaining ();
5776 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5777 pref->offrng[0] = 0;
5778 return wide_int_to_tree (sizetype, maxsize);
5779 }
5780
5781 /* Legacy wrapper around the above. The function should be removed
5782 once callers transition to one of the two above. */
5783
5784 tree
5785 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5786 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5787 {
5788 /* Set the initial offsets to zero and size to negative to indicate
5789 none has been computed yet. */
5790 access_ref ref;
5791 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5792 if (!size || !ref.base0)
5793 return NULL_TREE;
5794
5795 if (pdecl)
5796 *pdecl = ref.ref;
5797
5798 if (poff)
5799 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5800
5801 return size;
5802 }
5803
5804 /* Helper to determine and check the sizes of the source and the destination
5805 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5806 call expression, DEST is the destination argument, SRC is the source
5807 argument or null, and LEN is the number of bytes. Use Object Size type-0
5808 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5809 (no overflow or invalid sizes), false otherwise. */
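/* For example, for
     char d[4];
     memcpy (d, s, 8);
   the destination is smaller than the 8 bytes written, so the call is
   diagnosed by -Wstringop-overflow and the function returns false.  */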
5810
5811 static bool
5812 check_memop_access (tree exp, tree dest, tree src, tree size)
5813 {
5814 /* For functions like memset and memcpy that operate on raw memory
5815 try to determine the size of the largest source and destination
5816 object using type-0 Object Size regardless of the object size
5817 type specified by the option. */
5818 access_data data (exp, access_read_write);
5819 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5820 tree dstsize = compute_objsize (dest, 0, &data.dst);
5821
5822 return check_access (exp, size, /*maxread=*/NULL_TREE,
5823 srcsize, dstsize, data.mode, &data);
5824 }
5825
5826 /* Validate memchr arguments without performing any expansion.
5827 Return NULL_RTX. */
5828
5829 static rtx
5830 expand_builtin_memchr (tree exp, rtx)
5831 {
5832 if (!validate_arglist (exp,
5833 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5834 return NULL_RTX;
5835
5836 tree arg1 = CALL_EXPR_ARG (exp, 0);
5837 tree len = CALL_EXPR_ARG (exp, 2);
5838
5839 check_read_access (exp, arg1, len, 0);
5840
5841 return NULL_RTX;
5842 }
5843
5844 /* Expand a call EXP to the memcpy builtin.
5845 Return NULL_RTX if we failed, the caller should emit a normal call,
5846 otherwise try to get the result in TARGET, if convenient (and in
5847 mode MODE if that's convenient). */
5848
5849 static rtx
5850 expand_builtin_memcpy (tree exp, rtx target)
5851 {
5852 if (!validate_arglist (exp,
5853 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5854 return NULL_RTX;
5855
5856 tree dest = CALL_EXPR_ARG (exp, 0);
5857 tree src = CALL_EXPR_ARG (exp, 1);
5858 tree len = CALL_EXPR_ARG (exp, 2);
5859
5860 check_memop_access (exp, dest, src, len);
5861
5862 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5863 /*retmode=*/ RETURN_BEGIN, false);
5864 }
5865
5866 /* Check a call EXP to the memmove built-in for validity.
5867 Return NULL_RTX on both success and failure. */
5868
5869 static rtx
5870 expand_builtin_memmove (tree exp, rtx target)
5871 {
5872 if (!validate_arglist (exp,
5873 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5874 return NULL_RTX;
5875
5876 tree dest = CALL_EXPR_ARG (exp, 0);
5877 tree src = CALL_EXPR_ARG (exp, 1);
5878 tree len = CALL_EXPR_ARG (exp, 2);
5879
5880 check_memop_access (exp, dest, src, len);
5881
5882 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5883 /*retmode=*/ RETURN_BEGIN, true);
5884 }
5885
5886 /* Expand a call EXP to the mempcpy builtin.
5887 Return NULL_RTX if we failed; the caller should emit a normal call,
5888 otherwise try to get the result in TARGET, if convenient (and in
5889 mode MODE if that's convenient). */
5890
5891 static rtx
5892 expand_builtin_mempcpy (tree exp, rtx target)
5893 {
5894 if (!validate_arglist (exp,
5895 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5896 return NULL_RTX;
5897
5898 tree dest = CALL_EXPR_ARG (exp, 0);
5899 tree src = CALL_EXPR_ARG (exp, 1);
5900 tree len = CALL_EXPR_ARG (exp, 2);
5901
5902 /* Policy does not generally allow using compute_objsize (which
5903 is used internally by check_memop_access) to change code generation
5904 or drive optimization decisions.
5905
5906 In this instance it is safe because the code we generate has
5907 the same semantics regardless of the return value of
5908 check_memop_access. Exactly the same amount of data is copied
5909 and the return value is exactly the same in both cases.
5910
5911 Furthermore, check_memop_access always uses mode 0 for the call to
5912 compute_objsize, so the imprecise nature of compute_objsize is
5913 avoided. */
5914
5915 /* Avoid expanding mempcpy into memcpy when the call is determined
5916 to overflow the buffer. This also prevents the same overflow
5917 from being diagnosed again when expanding memcpy. */
5918 if (!check_memop_access (exp, dest, src, len))
5919 return NULL_RTX;
5920
5921 return expand_builtin_mempcpy_args (dest, src, len,
5922 target, exp, /*retmode=*/ RETURN_END);
5923 }
5924
5925 /* Helper function to do the actual work for expand of memory copy family
5926 functions (memcpy, mempcpy, stpcpy). The expansion copies LEN bytes
5927 of memory from SRC to DEST and assigns the result to TARGET if
5928 convenient. The return value is based on the RETMODE argument. */
5929
5930 static rtx
5931 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5932 rtx target, tree exp, memop_ret retmode,
5933 bool might_overlap)
5934 {
5935 unsigned int src_align = get_pointer_alignment (src);
5936 unsigned int dest_align = get_pointer_alignment (dest);
5937 rtx dest_mem, src_mem, dest_addr, len_rtx;
5938 HOST_WIDE_INT expected_size = -1;
5939 unsigned int expected_align = 0;
5940 unsigned HOST_WIDE_INT min_size;
5941 unsigned HOST_WIDE_INT max_size;
5942 unsigned HOST_WIDE_INT probable_max_size;
5943
5944 bool is_move_done;
5945
5946 /* If DEST is not a pointer type, call the normal function. */
5947 if (dest_align == 0)
5948 return NULL_RTX;
5949
5950 /* If either SRC is not a pointer type, don't do this
5951 operation in-line. */
5952 if (src_align == 0)
5953 return NULL_RTX;
5954
5955 if (currently_expanding_gimple_stmt)
5956 stringop_block_profile (currently_expanding_gimple_stmt,
5957 &expected_align, &expected_size);
5958
5959 if (expected_align < dest_align)
5960 expected_align = dest_align;
5961 dest_mem = get_memory_rtx (dest, len);
5962 set_mem_align (dest_mem, dest_align);
5963 len_rtx = expand_normal (len);
5964 determine_block_size (len, len_rtx, &min_size, &max_size,
5965 &probable_max_size);
5966
5967 /* Try to get the byte representation of the constant SRC points to,
5968 with its byte size in NBYTES. */
5969 unsigned HOST_WIDE_INT nbytes;
5970 const char *rep = getbyterep (src, &nbytes);
5971
5972 /* If the function's constant bound LEN_RTX is less than or equal
5973 to the byte size of the representation of the constant argument,
5974 and if block move would be done by pieces, we can avoid loading
5975 the bytes from memory and only store the computed constant.
5976 This works in the overlap (memmove) case as well because
5977 store_by_pieces just generates a series of stores of constants
5978 from the representation returned by getbyterep(). */
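/* For example, for  memcpy (d, "abc", 4)  with a suitably aligned D,
   when the target allows the block to be stored by pieces, the four
   bytes of the representation (including the terminating nul) are
   stored directly without being loaded from memory.  */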
5979 if (rep
5980 && CONST_INT_P (len_rtx)
5981 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
5982 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
5983 CONST_CAST (char *, rep),
5984 dest_align, false))
5985 {
5986 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
5987 builtin_memcpy_read_str,
5988 CONST_CAST (char *, rep),
5989 dest_align, false, retmode);
5990 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5991 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5992 return dest_mem;
5993 }
5994
5995 src_mem = get_memory_rtx (src, len);
5996 set_mem_align (src_mem, src_align);
5997
5998 /* Copy word part most expediently. */
5999 enum block_op_methods method = BLOCK_OP_NORMAL;
6000 if (CALL_EXPR_TAILCALL (exp)
6001 && (retmode == RETURN_BEGIN || target == const0_rtx))
6002 method = BLOCK_OP_TAILCALL;
6003 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6004 && retmode == RETURN_END
6005 && !might_overlap
6006 && target != const0_rtx);
6007 if (use_mempcpy_call)
6008 method = BLOCK_OP_NO_LIBCALL_RET;
6009 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6010 expected_align, expected_size,
6011 min_size, max_size, probable_max_size,
6012 use_mempcpy_call, &is_move_done,
6013 might_overlap);
6014
6015 /* Bail out when a mempcpy call would be expanded as libcall and when
6016 we have a target that provides a fast implementation
6017 of mempcpy routine. */
6018 if (!is_move_done)
6019 return NULL_RTX;
6020
6021 if (dest_addr == pc_rtx)
6022 return NULL_RTX;
6023
6024 if (dest_addr == 0)
6025 {
6026 dest_addr = force_operand (XEXP (dest_mem, 0), target);
6027 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6028 }
6029
6030 if (retmode != RETURN_BEGIN && target != const0_rtx)
6031 {
6032 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6033 /* stpcpy returns a pointer to the last byte. */
6034 if (retmode == RETURN_END_MINUS_ONE)
6035 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6036 }
6037
6038 return dest_addr;
6039 }
6040
6041 static rtx
6042 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
6043 rtx target, tree orig_exp, memop_ret retmode)
6044 {
6045 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
6046 retmode, false);
6047 }
6048
6049 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
6050 we failed; the caller should emit a normal call. Otherwise try to
6051 get the result in TARGET, if convenient.
6052 Return value is based on RETMODE argument. */
6053
6054 static rtx
6055 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
6056 {
6057 class expand_operand ops[3];
6058 rtx dest_mem;
6059 rtx src_mem;
6060
6061 if (!targetm.have_movstr ())
6062 return NULL_RTX;
6063
6064 dest_mem = get_memory_rtx (dest, NULL);
6065 src_mem = get_memory_rtx (src, NULL);
6066 if (retmode == RETURN_BEGIN)
6067 {
6068 target = force_reg (Pmode, XEXP (dest_mem, 0));
6069 dest_mem = replace_equiv_address (dest_mem, target);
6070 }
6071
6072 create_output_operand (&ops[0],
6073 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
6074 create_fixed_operand (&ops[1], dest_mem);
6075 create_fixed_operand (&ops[2], src_mem);
6076 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
6077 return NULL_RTX;
6078
6079 if (retmode != RETURN_BEGIN && target != const0_rtx)
6080 {
6081 target = ops[0].value;
6082 /* movstr is supposed to set end to the address of the NUL
6083 terminator. If the caller requested a mempcpy-like return value,
6084 adjust it. */
6085 if (retmode == RETURN_END)
6086 {
6087 rtx tem = plus_constant (GET_MODE (target),
6088 gen_lowpart (GET_MODE (target), target), 1);
6089 emit_move_insn (target, force_operand (tem, NULL_RTX));
6090 }
6091 }
6092 return target;
6093 }
6094
6095 /* Do some very basic size validation of a call to the strcat builtin
6096 given by EXP. Return NULL_RTX to have the built-in expand to a call
6097 to the library function. */
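/* For example, for
     char d[4];
     strcat (d, "abcdef");
   the source string is longer than the destination, so the call is
   diagnosed by -Wstringop-overflow.  */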
6098
6099 static rtx
6100 expand_builtin_strcat (tree exp)
6101 {
6102 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6103 || !warn_stringop_overflow)
6104 return NULL_RTX;
6105
6106 tree dest = CALL_EXPR_ARG (exp, 0);
6107 tree src = CALL_EXPR_ARG (exp, 1);
6108
6109 /* There is no way here to determine the length of the string in
6110 the destination to which the SRC string is being appended so
6111 just diagnose cases when the source string is longer than
6112 the destination object. */
6113 access_data data (exp, access_read_write, NULL_TREE, true,
6114 NULL_TREE, true);
6115 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6116 compute_objsize (src, ost, &data.src);
6117 tree destsize = compute_objsize (dest, ost, &data.dst);
6118
6119 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6120 src, destsize, data.mode, &data);
6121
6122 return NULL_RTX;
6123 }
6124
6125 /* Expand expression EXP, which is a call to the strcpy builtin. Return
6126 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
6127 try to get the result in TARGET, if convenient (and in mode MODE if that's
6128 convenient). */
6129
6130 static rtx
6131 expand_builtin_strcpy (tree exp, rtx target)
6132 {
6133 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6134 return NULL_RTX;
6135
6136 tree dest = CALL_EXPR_ARG (exp, 0);
6137 tree src = CALL_EXPR_ARG (exp, 1);
6138
6139 if (warn_stringop_overflow)
6140 {
6141 access_data data (exp, access_read_write, NULL_TREE, true,
6142 NULL_TREE, true);
6143 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6144 compute_objsize (src, ost, &data.src);
6145 tree dstsize = compute_objsize (dest, ost, &data.dst);
6146 check_access (exp, /*dstwrite=*/ NULL_TREE,
6147 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6148 dstsize, data.mode, &data);
6149 }
6150
6151 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
6152 {
6153 /* Check to see if the argument was declared attribute nonstring
6154 and if so, issue a warning since at this point it's not known
6155 to be nul-terminated. */
6156 tree fndecl = get_callee_fndecl (exp);
6157 maybe_warn_nonstring_arg (fndecl, exp);
6158 return ret;
6159 }
6160
6161 return NULL_RTX;
6162 }
6163
6164 /* Helper function to do the actual work for expand_builtin_strcpy. The
6165 arguments to the builtin_strcpy call DEST and SRC are broken out
6166 so that this can also be called without constructing an actual CALL_EXPR.
6167 The other arguments and return value are the same as for
6168 expand_builtin_strcpy. */
6169
6170 static rtx
6171 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
6172 {
6173 /* Detect strcpy calls with unterminated arrays. */
6174 tree size;
6175 bool exact;
6176 if (tree nonstr = unterminated_array (src, &size, &exact))
6177 {
6178 /* NONSTR refers to the non-nul terminated constant array. */
6179 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6180 size, exact);
6181 return NULL_RTX;
6182 }
6183
6184 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
6185 }
6186
6187 /* Expand a call EXP to the stpcpy builtin.
6188 Return NULL_RTX if we failed; the caller should emit a normal call;
6189 otherwise try to get the result in TARGET, if convenient (and in
6190 mode MODE if that's convenient). */
6191
6192 static rtx
6193 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
6194 {
6195 tree dst, src;
6196 location_t loc = EXPR_LOCATION (exp);
6197
6198 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6199 return NULL_RTX;
6200
6201 dst = CALL_EXPR_ARG (exp, 0);
6202 src = CALL_EXPR_ARG (exp, 1);
6203
6204 if (warn_stringop_overflow)
6205 {
6206 access_data data (exp, access_read_write);
6207 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6208 &data.dst);
6209 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6210 src, destsize, data.mode, &data);
6211 }
6212
6213 /* If return value is ignored, transform stpcpy into strcpy. */
6214 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
6215 {
6216 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
6217 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
6218 return expand_expr (result, target, mode, EXPAND_NORMAL);
6219 }
6220 else
6221 {
6222 tree len, lenp1;
6223 rtx ret;
6224
6225 /* Ensure we get an actual string whose length can be evaluated at
6226 compile-time, not an expression containing a string. This is
6227 because the latter will potentially produce pessimized code
6228 when used to produce the return value. */
6229 c_strlen_data lendata = { };
6230 if (!c_getstr (src)
6231 || !(len = c_strlen (src, 0, &lendata, 1)))
6232 return expand_movstr (dst, src, target,
6233 /*retmode=*/ RETURN_END_MINUS_ONE);
6234
6235 if (lendata.decl)
6236 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
6237
6238 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
6239 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
6240 target, exp,
6241 /*retmode=*/ RETURN_END_MINUS_ONE);
6242
6243 if (ret)
6244 return ret;
6245
6246 if (TREE_CODE (len) == INTEGER_CST)
6247 {
6248 rtx len_rtx = expand_normal (len);
6249
6250 if (CONST_INT_P (len_rtx))
6251 {
6252 ret = expand_builtin_strcpy_args (exp, dst, src, target);
6253
6254 if (ret)
6255 {
6256 if (! target)
6257 {
6258 if (mode != VOIDmode)
6259 target = gen_reg_rtx (mode);
6260 else
6261 target = gen_reg_rtx (GET_MODE (ret));
6262 }
6263 if (GET_MODE (target) != GET_MODE (ret))
6264 ret = gen_lowpart (GET_MODE (target), ret);
6265
6266 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
6267 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
6268 gcc_assert (ret);
6269
6270 return target;
6271 }
6272 }
6273 }
6274
6275 return expand_movstr (dst, src, target,
6276 /*retmode=*/ RETURN_END_MINUS_ONE);
6277 }
6278 }
6279
6280 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6281 arguments while being careful to avoid duplicate warnings (which could
6282 be issued if the expander were to expand the call, resulting in it
6283 being emitted in expand_call()). */
6284
6285 static rtx
6286 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6287 {
6288 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6289 {
6290 /* The call has been successfully expanded. Check for nonstring
6291 arguments and issue warnings as appropriate. */
6292 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6293 return ret;
6294 }
6295
6296 return NULL_RTX;
6297 }
6298
6299 /* Check a call EXP to the stpncpy built-in for validity.
6300 Return NULL_RTX on both success and failure. */
6301
6302 static rtx
6303 expand_builtin_stpncpy (tree exp, rtx)
6304 {
6305 if (!validate_arglist (exp,
6306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6307 || !warn_stringop_overflow)
6308 return NULL_RTX;
6309
6310 /* The source and destination of the call. */
6311 tree dest = CALL_EXPR_ARG (exp, 0);
6312 tree src = CALL_EXPR_ARG (exp, 1);
6313
6314 /* The exact number of bytes to write (not the maximum). */
6315 tree len = CALL_EXPR_ARG (exp, 2);
6316 access_data data (exp, access_read_write);
6317 /* The size of the destination object. */
6318 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6319 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
6320 return NULL_RTX;
6321 }
6322
6323 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6324 bytes from constant string DATA + OFFSET and return it as target
6325 constant. */
6326
6327 rtx
6328 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
6329 scalar_int_mode mode)
6330 {
6331 const char *str = (const char *) data;
6332
6333 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6334 return const0_rtx;
6335
6336 return c_readstr (str + offset, mode);
6337 }
6338
6339 /* Helper to check the sizes of sequences and the destination of calls
6340 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6341 success (no overflow or invalid sizes), false otherwise. */
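/* For example, for
     char d[8];
     strncat (d, s, sizeof d);
   the bound equals the destination size, leaving no room for the
   terminating nul, so the call is diagnosed and the function returns
   false.  */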
6342
6343 static bool
6344 check_strncat_sizes (tree exp, tree objsize)
6345 {
6346 tree dest = CALL_EXPR_ARG (exp, 0);
6347 tree src = CALL_EXPR_ARG (exp, 1);
6348 tree maxread = CALL_EXPR_ARG (exp, 2);
6349
6350 /* Try to determine the range of lengths that the source expression
6351 refers to. */
6352 c_strlen_data lendata = { };
6353 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6354
6355 /* Try to verify that the destination is big enough for the shortest
6356 string. */
6357
6358 access_data data (exp, access_read_write, maxread, true);
6359 if (!objsize && warn_stringop_overflow)
6360 {
6361 /* If it hasn't been provided by __strncat_chk, try to determine
6362 the size of the destination object into which the source is
6363 being copied. */
6364 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6365 }
6366
6367 /* Add one for the terminating nul. */
6368 tree srclen = (lendata.minlen
6369 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
6370 size_one_node)
6371 : NULL_TREE);
6372
6373 /* The strncat function copies at most MAXREAD bytes and always appends
6374 the terminating nul so the specified upper bound should never be equal
6375 to (or greater than) the size of the destination. */
6376 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6377 && tree_int_cst_equal (objsize, maxread))
6378 {
6379 location_t loc = tree_inlined_location (exp);
6380 warning_at (loc, OPT_Wstringop_overflow_,
6381 "%K%qD specified bound %E equals destination size",
6382 exp, get_callee_fndecl (exp), maxread);
6383
6384 return false;
6385 }
6386
6387 if (!srclen
6388 || (maxread && tree_fits_uhwi_p (maxread)
6389 && tree_fits_uhwi_p (srclen)
6390 && tree_int_cst_lt (maxread, srclen)))
6391 srclen = maxread;
6392
6393 /* The number of bytes to write is LEN but check_access will also
6394 check SRCLEN if LEN's value isn't known. */
6395 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6396 objsize, data.mode, &data);
6397 }
6398
6399 /* Similar to expand_builtin_strcat, do some very basic size validation
6400 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
6401 the built-in expand to a call to the library function. */
6402
6403 static rtx
6404 expand_builtin_strncat (tree exp, rtx)
6405 {
6406 if (!validate_arglist (exp,
6407 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6408 || !warn_stringop_overflow)
6409 return NULL_RTX;
6410
6411 tree dest = CALL_EXPR_ARG (exp, 0);
6412 tree src = CALL_EXPR_ARG (exp, 1);
6413 /* The upper bound on the number of bytes to write. */
6414 tree maxread = CALL_EXPR_ARG (exp, 2);
6415
6416 /* Detect unterminated source (only). */
6417 if (!check_nul_terminated_array (exp, src, maxread))
6418 return NULL_RTX;
6419
6420 /* The length of the source sequence. */
6421 tree slen = c_strlen (src, 1);
6422
6423 /* Try to determine the range of lengths that the source expression
6424 refers to. Since the lengths are only used for warning and not
6425 for code generation, disable strict mode below. */
6426 tree maxlen = slen;
6427 if (!maxlen)
6428 {
6429 c_strlen_data lendata = { };
6430 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6431 maxlen = lendata.maxbound;
6432 }
6433
6434 access_data data (exp, access_read_write);
6435 /* Try to verify that the destination is big enough for the shortest
6436 string. First try to determine the size of the destination object
6437 into which the source is being copied. */
6438 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6439
6440 /* Add one for the terminating nul. */
6441 tree srclen = (maxlen
6442 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
6443 size_one_node)
6444 : NULL_TREE);
6445
6446 /* The strncat function copies at most MAXREAD bytes and always appends
6447 the terminating nul so the specified upper bound should never be equal
6448 to (or greater than) the size of the destination. */
6449 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6450 && tree_int_cst_equal (destsize, maxread))
6451 {
6452 location_t loc = tree_inlined_location (exp);
6453 warning_at (loc, OPT_Wstringop_overflow_,
6454 "%K%qD specified bound %E equals destination size",
6455 exp, get_callee_fndecl (exp), maxread);
6456
6457 return NULL_RTX;
6458 }
6459
6460 if (!srclen
6461 || (maxread && tree_fits_uhwi_p (maxread)
6462 && tree_fits_uhwi_p (srclen)
6463 && tree_int_cst_lt (maxread, srclen)))
6464 srclen = maxread;
6465
6466 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6467 destsize, data.mode, &data);
6468 return NULL_RTX;
6469 }
6470
6471 /* Expand expression EXP, which is a call to the strncpy builtin. Return
6472 NULL_RTX if we failed; the caller should emit a normal call. */
6473
6474 static rtx
6475 expand_builtin_strncpy (tree exp, rtx target)
6476 {
6477 location_t loc = EXPR_LOCATION (exp);
6478
6479 if (!validate_arglist (exp,
6480 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6481 return NULL_RTX;
6482 tree dest = CALL_EXPR_ARG (exp, 0);
6483 tree src = CALL_EXPR_ARG (exp, 1);
6484 /* The number of bytes to write (not the maximum). */
6485 tree len = CALL_EXPR_ARG (exp, 2);
6486
6487 /* The length of the source sequence. */
6488 tree slen = c_strlen (src, 1);
6489
6490 if (warn_stringop_overflow)
6491 {
6492 access_data data (exp, access_read_write, len, true, len, true);
6493 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6494 compute_objsize (src, ost, &data.src);
6495 tree dstsize = compute_objsize (dest, ost, &data.dst);
6496 /* The number of bytes to write is LEN but check_access will also
6497 check SLEN if LEN's value isn't known. */
6498 check_access (exp, /*dstwrite=*/len,
6499 /*maxread=*/len, src, dstsize, data.mode, &data);
6500 }
6501
6502 /* We must be passed a constant len and src parameter. */
6503 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6504 return NULL_RTX;
6505
6506 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6507
6508 /* We're required to pad with trailing zeros if the requested
6509 len is greater than strlen(s2)+1. In that case try to
6510 use store_by_pieces; if it fails, punt. */
6511 if (tree_int_cst_lt (slen, len))
6512 {
6513 unsigned int dest_align = get_pointer_alignment (dest);
6514 const char *p = c_getstr (src);
6515 rtx dest_mem;
6516
6517 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6518 || !can_store_by_pieces (tree_to_uhwi (len),
6519 builtin_strncpy_read_str,
6520 CONST_CAST (char *, p),
6521 dest_align, false))
6522 return NULL_RTX;
6523
6524 dest_mem = get_memory_rtx (dest, len);
6525 store_by_pieces (dest_mem, tree_to_uhwi (len),
6526 builtin_strncpy_read_str,
6527 CONST_CAST (char *, p), dest_align, false,
6528 RETURN_BEGIN);
6529 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6530 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6531 return dest_mem;
6532 }
6533
6534 return NULL_RTX;
6535 }
6536
6537 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6538 bytes from constant string DATA + OFFSET and return it as target
6539 constant. */
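/* For example, with *DATA equal to 'x' and a 4-byte MODE, the result is
   the constant 0x78787878.  */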
6540
6541 rtx
6542 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6543 scalar_int_mode mode)
6544 {
6545 const char *c = (const char *) data;
6546 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
6547
6548 memset (p, *c, GET_MODE_SIZE (mode));
6549
6550 return c_readstr (p, mode);
6551 }
6552
6553 /* Callback routine for store_by_pieces. Return the RTL of a register
6554 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6555 char value given in the RTL register data. For example, if mode is
6556 4 bytes wide, return the RTL for 0x01010101*data. */
6557
6558 static rtx
6559 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6560 scalar_int_mode mode)
6561 {
6562 rtx target, coeff;
6563 size_t size;
6564 char *p;
6565
6566 size = GET_MODE_SIZE (mode);
6567 if (size == 1)
6568 return (rtx) data;
6569
6570 p = XALLOCAVEC (char, size);
6571 memset (p, 1, size);
6572 coeff = c_readstr (p, mode);
6573
6574 target = convert_to_mode (mode, (rtx) data, 1);
6575 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6576 return force_reg (mode, target);
6577 }
6578
6579 /* Expand expression EXP, which is a call to the memset builtin. Return
6580 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
6581 try to get the result in TARGET, if convenient (and in mode MODE if that's
6582 convenient). */
6583
6584 static rtx
6585 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
6586 {
6587 if (!validate_arglist (exp,
6588 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6589 return NULL_RTX;
6590
6591 tree dest = CALL_EXPR_ARG (exp, 0);
6592 tree val = CALL_EXPR_ARG (exp, 1);
6593 tree len = CALL_EXPR_ARG (exp, 2);
6594
6595 check_memop_access (exp, dest, NULL_TREE, len);
6596
6597 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
6598 }
6599
6600 /* Helper function to do the actual work for expand_builtin_memset. The
6601 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6602 so that this can also be called without constructing an actual CALL_EXPR.
6603 The other arguments and return value are the same as for
6604 expand_builtin_memset. */
6605
6606 static rtx
6607 expand_builtin_memset_args (tree dest, tree val, tree len,
6608 rtx target, machine_mode mode, tree orig_exp)
6609 {
6610 tree fndecl, fn;
6611 enum built_in_function fcode;
6612 machine_mode val_mode;
6613 char c;
6614 unsigned int dest_align;
6615 rtx dest_mem, dest_addr, len_rtx;
6616 HOST_WIDE_INT expected_size = -1;
6617 unsigned int expected_align = 0;
6618 unsigned HOST_WIDE_INT min_size;
6619 unsigned HOST_WIDE_INT max_size;
6620 unsigned HOST_WIDE_INT probable_max_size;
6621
6622 dest_align = get_pointer_alignment (dest);
6623
6624 /* If DEST is not a pointer type, don't do this operation in-line. */
6625 if (dest_align == 0)
6626 return NULL_RTX;
6627
6628 if (currently_expanding_gimple_stmt)
6629 stringop_block_profile (currently_expanding_gimple_stmt,
6630 &expected_align, &expected_size);
6631
6632 if (expected_align < dest_align)
6633 expected_align = dest_align;
6634
6635 /* If the LEN parameter is zero, return DEST. */
6636 if (integer_zerop (len))
6637 {
6638 /* Evaluate and ignore VAL in case it has side-effects. */
6639 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6640 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6641 }
6642
6643 /* Stabilize the arguments in case we fail. */
6644 dest = builtin_save_expr (dest);
6645 val = builtin_save_expr (val);
6646 len = builtin_save_expr (len);
6647
6648 len_rtx = expand_normal (len);
6649 determine_block_size (len, len_rtx, &min_size, &max_size,
6650 &probable_max_size);
6651 dest_mem = get_memory_rtx (dest, len);
6652 val_mode = TYPE_MODE (unsigned_char_type_node);
6653
6654 if (TREE_CODE (val) != INTEGER_CST)
6655 {
6656 rtx val_rtx;
6657
6658 val_rtx = expand_normal (val);
6659 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
6660
6661 /* Assume that we can memset by pieces if we can store
6662 * the coefficients by pieces (in the required modes).
6663 * We can't pass builtin_memset_gen_str as that emits RTL. */
6664 c = 1;
6665 if (tree_fits_uhwi_p (len)
6666 && can_store_by_pieces (tree_to_uhwi (len),
6667 builtin_memset_read_str, &c, dest_align,
6668 true))
6669 {
6670 val_rtx = force_reg (val_mode, val_rtx);
6671 store_by_pieces (dest_mem, tree_to_uhwi (len),
6672 builtin_memset_gen_str, val_rtx, dest_align,
6673 true, RETURN_BEGIN);
6674 }
6675 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
6676 dest_align, expected_align,
6677 expected_size, min_size, max_size,
6678 probable_max_size))
6679 goto do_libcall;
6680
6681 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6682 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6683 return dest_mem;
6684 }
6685
6686 if (target_char_cast (val, &c))
6687 goto do_libcall;
6688
6689 if (c)
6690 {
6691 if (tree_fits_uhwi_p (len)
6692 && can_store_by_pieces (tree_to_uhwi (len),
6693 builtin_memset_read_str, &c, dest_align,
6694 true))
6695 store_by_pieces (dest_mem, tree_to_uhwi (len),
6696 builtin_memset_read_str, &c, dest_align, true,
6697 RETURN_BEGIN);
6698 else if (!set_storage_via_setmem (dest_mem, len_rtx,
6699 gen_int_mode (c, val_mode),
6700 dest_align, expected_align,
6701 expected_size, min_size, max_size,
6702 probable_max_size))
6703 goto do_libcall;
6704
6705 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6706 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6707 return dest_mem;
6708 }
6709
6710 set_mem_align (dest_mem, dest_align);
6711 dest_addr = clear_storage_hints (dest_mem, len_rtx,
6712 CALL_EXPR_TAILCALL (orig_exp)
6713 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
6714 expected_align, expected_size,
6715 min_size, max_size,
6716 probable_max_size);
6717
6718 if (dest_addr == 0)
6719 {
6720 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6721 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6722 }
6723
6724 return dest_addr;
6725
6726 do_libcall:
6727 fndecl = get_callee_fndecl (orig_exp);
6728 fcode = DECL_FUNCTION_CODE (fndecl);
6729 if (fcode == BUILT_IN_MEMSET)
6730 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6731 dest, val, len);
6732 else if (fcode == BUILT_IN_BZERO)
6733 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6734 dest, len);
6735 else
6736 gcc_unreachable ();
6737 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6738 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6739 return expand_call (fn, target, target == const0_rtx);
6740 }
6741
6742 /* Expand expression EXP, which is a call to the bzero builtin. Return
6743 NULL_RTX if we failed; the caller should emit a normal call. */
6744
6745 static rtx
6746 expand_builtin_bzero (tree exp)
6747 {
6748 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6749 return NULL_RTX;
6750
6751 tree dest = CALL_EXPR_ARG (exp, 0);
6752 tree size = CALL_EXPR_ARG (exp, 1);
6753
6754 check_memop_access (exp, dest, NULL_TREE, size);
6755
6756 /* New argument list transforming bzero(ptr x, int y) to
6757 memset(ptr x, int 0, size_t y). This is done this way
6758 so that if it isn't expanded inline, we fall back to
6759 calling bzero instead of memset. */
6760
6761 location_t loc = EXPR_LOCATION (exp);
6762
6763 return expand_builtin_memset_args (dest, integer_zero_node,
6764 fold_convert_loc (loc,
6765 size_type_node, size),
6766 const0_rtx, VOIDmode, exp);
6767 }
6768
6769 /* Try to expand cmpstr operation ICODE with the given operands.
6770 Return the result rtx on success, otherwise return null. */
6771
6772 static rtx
6773 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6774 HOST_WIDE_INT align)
6775 {
6776 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6777
6778 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6779 target = NULL_RTX;
6780
6781 class expand_operand ops[4];
6782 create_output_operand (&ops[0], target, insn_mode);
6783 create_fixed_operand (&ops[1], arg1_rtx);
6784 create_fixed_operand (&ops[2], arg2_rtx);
6785 create_integer_operand (&ops[3], align);
6786 if (maybe_expand_insn (icode, 4, ops))
6787 return ops[0].value;
6788 return NULL_RTX;
6789 }
6790
6791 /* Expand expression EXP, which is a call to the memcmp built-in function.
6792 Return NULL_RTX if we failed and the caller should emit a normal call,
6793 otherwise try to get the result in TARGET, if convenient.
6794 RESULT_EQ is true if we can relax the returned value to be either zero
6795 or nonzero, without caring about the sign. */
6796
6797 static rtx
6798 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
6799 {
6800 if (!validate_arglist (exp,
6801 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6802 return NULL_RTX;
6803
6804 tree arg1 = CALL_EXPR_ARG (exp, 0);
6805 tree arg2 = CALL_EXPR_ARG (exp, 1);
6806 tree len = CALL_EXPR_ARG (exp, 2);
6807
6808 /* Diagnose calls where the specified length exceeds the size of either
6809 object. */
6810 if (!check_read_access (exp, arg1, len, 0)
6811 || !check_read_access (exp, arg2, len, 0))
6812 return NULL_RTX;
6813
6814 /* Due to the performance benefit, always inline the calls first
6815 when result_eq is false. */
6816 rtx result = NULL_RTX;
6817 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
6818 if (!result_eq && fcode != BUILT_IN_BCMP)
6819 {
6820 result = inline_expand_builtin_bytecmp (exp, target);
6821 if (result)
6822 return result;
6823 }
6824
6825 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6826 location_t loc = EXPR_LOCATION (exp);
6827
6828 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6829 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6830
6831 /* If we don't have POINTER_TYPE, call the function. */
6832 if (arg1_align == 0 || arg2_align == 0)
6833 return NULL_RTX;
6834
6835 rtx arg1_rtx = get_memory_rtx (arg1, len);
6836 rtx arg2_rtx = get_memory_rtx (arg2, len);
6837 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
6838
6839 /* Set MEM_SIZE as appropriate. */
6840 if (CONST_INT_P (len_rtx))
6841 {
6842 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6843 set_mem_size (arg2_rtx, INTVAL (len_rtx));
6844 }
6845
6846 by_pieces_constfn constfn = NULL;
6847
6848 /* Try to get the byte representation of the constant ARG2 (or, only
6849 when the function's result is used for equality to zero, ARG1)
6850 points to, with its byte size in NBYTES. */
6851 unsigned HOST_WIDE_INT nbytes;
6852 const char *rep = getbyterep (arg2, &nbytes);
6853 if (result_eq && rep == NULL)
6854 {
6855 /* For equality to zero the arguments are interchangeable. */
6856 rep = getbyterep (arg1, &nbytes);
6857 if (rep != NULL)
6858 std::swap (arg1_rtx, arg2_rtx);
6859 }
6860
6861 /* If the function's constant bound LEN_RTX is less than or equal
6862 to the byte size of the representation of the constant argument,
6863 and if block move would be done by pieces, we can avoid loading
6864 the bytes from memory and only store the computed constant result. */
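  /* For instance, in a hypothetical call

       memcmp (p, "abc", 3)

     the byte representation of "abc" is known at compile time and the bound
     3 does not exceed it, so the by-pieces comparison below can use the
     constant bytes directly instead of loading the second argument.  */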
6865 if (rep
6866 && CONST_INT_P (len_rtx)
6867 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
6868 constfn = builtin_memcpy_read_str;
6869
6870 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6871 TREE_TYPE (len), target,
6872 result_eq, constfn,
6873 CONST_CAST (char *, rep));
6874
6875 if (result)
6876 {
6877 /* Return the value in the proper mode for this function. */
6878 if (GET_MODE (result) == mode)
6879 return result;
6880
6881 if (target != 0)
6882 {
6883 convert_move (target, result, 0);
6884 return target;
6885 }
6886
6887 return convert_to_mode (mode, result, 0);
6888 }
6889
6890 return NULL_RTX;
6891 }
6892
6893 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
6894 if we failed; the caller should emit a normal call. Otherwise try to get
6895 the result in TARGET, if convenient. */
6896
6897 static rtx
6898 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
6899 {
6900 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6901 return NULL_RTX;
6902
6903 tree arg1 = CALL_EXPR_ARG (exp, 0);
6904 tree arg2 = CALL_EXPR_ARG (exp, 1);
6905
6906 if (!check_read_access (exp, arg1)
6907 || !check_read_access (exp, arg2))
6908 return NULL_RTX;
6909
6910 /* Due to the performance benefit, always inline the calls first. */
6911 rtx result = NULL_RTX;
6912 result = inline_expand_builtin_bytecmp (exp, target);
6913 if (result)
6914 return result;
6915
6916 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
6917 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6918 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
6919 return NULL_RTX;
6920
6921 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6922 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6923
6924 /* If we couldn't determine the alignment of either argument, punt and let the caller emit a normal call. */
6925 if (arg1_align == 0 || arg2_align == 0)
6926 return NULL_RTX;
6927
6928 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
6929 arg1 = builtin_save_expr (arg1);
6930 arg2 = builtin_save_expr (arg2);
6931
6932 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
6933 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
6934
6935 /* Try to call cmpstrsi. */
6936 if (cmpstr_icode != CODE_FOR_nothing)
6937 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
6938 MIN (arg1_align, arg2_align));
6939
6940 /* Try to determine at least one length and call cmpstrnsi. */
6941 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6942 {
6943 tree len;
6944 rtx arg3_rtx;
6945
6946 tree len1 = c_strlen (arg1, 1);
6947 tree len2 = c_strlen (arg2, 1);
6948
6949 if (len1)
6950 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
6951 if (len2)
6952 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
6953
6954 /* If we don't have a constant length for the first, use the length
6955 of the second, if we know it. We don't require a constant for
6956 this case; some cost analysis could be done if both are available
6957 but neither is constant. For now, assume they're equally cheap,
6958 unless one has side effects. If both strings have constant lengths,
6959 use the smaller. */
6960
6961 if (!len1)
6962 len = len2;
6963 else if (!len2)
6964 len = len1;
6965 else if (TREE_SIDE_EFFECTS (len1))
6966 len = len2;
6967 else if (TREE_SIDE_EFFECTS (len2))
6968 len = len1;
6969 else if (TREE_CODE (len1) != INTEGER_CST)
6970 len = len2;
6971 else if (TREE_CODE (len2) != INTEGER_CST)
6972 len = len1;
6973 else if (tree_int_cst_lt (len1, len2))
6974 len = len1;
6975 else
6976 len = len2;
6977
6978 /* If both arguments have side effects, we cannot optimize. */
6979 if (len && !TREE_SIDE_EFFECTS (len))
6980 {
6981 arg3_rtx = expand_normal (len);
6982 result = expand_cmpstrn_or_cmpmem
6983 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
6984 arg3_rtx, MIN (arg1_align, arg2_align));
6985 }
6986 }
6987
6988 tree fndecl = get_callee_fndecl (exp);
6989 if (result)
6990 {
6991 /* Check to see if the argument was declared attribute nonstring
6992 and if so, issue a warning since at this point it's not known
6993 to be nul-terminated. */
6994 maybe_warn_nonstring_arg (fndecl, exp);
6995
6996 /* Return the value in the proper mode for this function. */
6997 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6998 if (GET_MODE (result) == mode)
6999 return result;
7000 if (target == 0)
7001 return convert_to_mode (mode, result, 0);
7002 convert_move (target, result, 0);
7003 return target;
7004 }
7005
7006 /* Expand the library call ourselves using a stabilized argument
7007 list to avoid re-evaluating the function's arguments twice. */
7008 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
7009 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7010 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
7011 return expand_call (fn, target, target == const0_rtx);
7012 }
7013
7014 /* Expand expression EXP, which is a call to the strncmp builtin. Return
7015 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
7016 try to get the result in TARGET, if convenient. */
7017
7018 static rtx
7019 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
7020 ATTRIBUTE_UNUSED machine_mode mode)
7021 {
7022 if (!validate_arglist (exp,
7023 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7024 return NULL_RTX;
7025
7026 tree arg1 = CALL_EXPR_ARG (exp, 0);
7027 tree arg2 = CALL_EXPR_ARG (exp, 1);
7028 tree arg3 = CALL_EXPR_ARG (exp, 2);
7029
7030 if (!check_nul_terminated_array (exp, arg1, arg3)
7031 || !check_nul_terminated_array (exp, arg2, arg3))
7032 return NULL_RTX;
7033
7034 location_t loc = tree_inlined_location (exp);
7035 tree len1 = c_strlen (arg1, 1);
7036 tree len2 = c_strlen (arg2, 1);
7037
7038 if (!len1 || !len2)
7039 {
7040 /* Check to see if the argument was declared attribute nonstring
7041 and if so, issue a warning since at this point it's not known
7042 to be nul-terminated. */
7043 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
7044 && !len1 && !len2)
7045 {
7046 /* A strncmp read is constrained not just by the bound but
7047 also by the length of the shorter string. Specifying
7048 a bound that's larger than the size of either array makes
7049 no sense and is likely a bug. When the length of neither
7050 of the two strings is known but the sizes of both of
7051 the arrays they are stored in are, issue a warning if
7052 the bound is larger than the size of the larger
7053 of the two arrays. */
7054
7055 access_ref ref1 (arg3, true);
7056 access_ref ref2 (arg3, true);
7057
7058 tree bndrng[2] = { NULL_TREE, NULL_TREE };
7059 get_size_range (arg3, bndrng, ref1.bndrng);
7060
7061 tree size1 = compute_objsize (arg1, 1, &ref1);
7062 tree size2 = compute_objsize (arg2, 1, &ref2);
7063 tree func = get_callee_fndecl (exp);
7064
7065 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
7066 {
7067 offset_int rem1 = ref1.size_remaining ();
7068 offset_int rem2 = ref2.size_remaining ();
7069 if (rem1 == 0 || rem2 == 0)
7070 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7071 bndrng, integer_zero_node);
7072 else
7073 {
7074 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
7075 if (maxrem < wi::to_offset (bndrng[0]))
7076 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
7077 func, bndrng,
7078 wide_int_to_tree (sizetype, maxrem));
7079 }
7080 }
7081 else if (bndrng[0]
7082 && !integer_zerop (bndrng[0])
7083 && ((size1 && integer_zerop (size1))
7084 || (size2 && integer_zerop (size2))))
7085 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7086 bndrng, integer_zero_node);
7087 }
7088 }
7089
7090 /* Due to the performance benefit, always inline the calls first. */
7091 rtx result = NULL_RTX;
7092 result = inline_expand_builtin_bytecmp (exp, target);
7093 if (result)
7094 return result;
7095
7096 /* If c_strlen can determine an expression for one of the string
7097 lengths, and it doesn't have side effects, then emit cmpstrnsi
7098 using length MIN(strlen(string)+1, arg3). */
7099 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7100 if (cmpstrn_icode == CODE_FOR_nothing)
7101 return NULL_RTX;
7102
7103 tree len;
7104
7105 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7106 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7107
7108 if (len1)
7109 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7110 if (len2)
7111 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7112
7113 tree len3 = fold_convert_loc (loc, sizetype, arg3);
7114
7115 /* If we don't have a constant length for the first, use the length
7116 of the second, if we know it. If neither string is constant length,
7117 use the given length argument. We don't require a constant for
7118 this case; some cost analysis could be done if both are available
7119 but neither is constant. For now, assume they're equally cheap,
7120 unless one has side effects. If both strings have constant lengths,
7121 use the smaller. */
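  /* As a hypothetical example, for

       strncmp (s, "hi", 100)

     len2 is strlen ("hi") + 1 == 3 while len1 is unknown, so the length used
     below becomes MIN (3, 100) == 3 rather than the full bound of 100.  */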
7122
7123 if (!len1 && !len2)
7124 len = len3;
7125 else if (!len1)
7126 len = len2;
7127 else if (!len2)
7128 len = len1;
7129 else if (TREE_SIDE_EFFECTS (len1))
7130 len = len2;
7131 else if (TREE_SIDE_EFFECTS (len2))
7132 len = len1;
7133 else if (TREE_CODE (len1) != INTEGER_CST)
7134 len = len2;
7135 else if (TREE_CODE (len2) != INTEGER_CST)
7136 len = len1;
7137 else if (tree_int_cst_lt (len1, len2))
7138 len = len1;
7139 else
7140 len = len2;
7141
7142 /* If we are not using the given length, we must incorporate it here.
7143 The actual new length parameter will be MIN(len,arg3) in this case. */
7144 if (len != len3)
7145 {
7146 len = fold_convert_loc (loc, sizetype, len);
7147 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
7148 }
7149 rtx arg1_rtx = get_memory_rtx (arg1, len);
7150 rtx arg2_rtx = get_memory_rtx (arg2, len);
7151 rtx arg3_rtx = expand_normal (len);
7152 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
7153 arg2_rtx, TREE_TYPE (len), arg3_rtx,
7154 MIN (arg1_align, arg2_align));
7155
7156 tree fndecl = get_callee_fndecl (exp);
7157 if (result)
7158 {
7159 /* Return the value in the proper mode for this function. */
7160 mode = TYPE_MODE (TREE_TYPE (exp));
7161 if (GET_MODE (result) == mode)
7162 return result;
7163 if (target == 0)
7164 return convert_to_mode (mode, result, 0);
7165 convert_move (target, result, 0);
7166 return target;
7167 }
7168
7169 /* Expand the library call ourselves using a stabilized argument
7170 list to avoid re-evaluating the function's arguments twice. */
7171 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
7172 if (TREE_NO_WARNING (exp))
7173 TREE_NO_WARNING (call) = true;
7174 gcc_assert (TREE_CODE (call) == CALL_EXPR);
7175 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
7176 return expand_call (call, target, target == const0_rtx);
7177 }
7178
7179 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
7180 if that's convenient. */
7181
7182 rtx
7183 expand_builtin_saveregs (void)
7184 {
7185 rtx val;
7186 rtx_insn *seq;
7187
7188 /* Don't do __builtin_saveregs more than once in a function.
7189 Save the result of the first call and reuse it. */
7190 if (saveregs_value != 0)
7191 return saveregs_value;
7192
7193 /* When this function is called, it means that registers must be
7194 saved on entry to this function. So we migrate the call to the
7195 first insn of this function. */
7196
7197 start_sequence ();
7198
7199 /* Do whatever the machine needs done in this case. */
7200 val = targetm.calls.expand_builtin_saveregs ();
7201
7202 seq = get_insns ();
7203 end_sequence ();
7204
7205 saveregs_value = val;
7206
7207 /* Put the insns after the NOTE that starts the function. If this
7208 is inside a start_sequence, make the outer-level insn chain current, so
7209 the code is placed at the start of the function. */
7210 push_topmost_sequence ();
7211 emit_insn_after (seq, entry_of_function ());
7212 pop_topmost_sequence ();
7213
7214 return val;
7215 }
7216
7217 /* Expand a call to __builtin_next_arg. */
7218
7219 static rtx
7220 expand_builtin_next_arg (void)
7221 {
7222 /* Checking arguments is already done in fold_builtin_next_arg
7223 that must be called before this function. */
7224 return expand_binop (ptr_mode, add_optab,
7225 crtl->args.internal_arg_pointer,
7226 crtl->args.arg_offset_rtx,
7227 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7228 }
7229
7230 /* Make it easier for the backends by protecting the valist argument
7231 from multiple evaluations. */
7232
7233 static tree
7234 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
7235 {
7236 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
7237
7238 /* The current way of determining the type of valist is completely
7239 bogus. We should have the information on the va builtin instead. */
7240 if (!vatype)
7241 vatype = targetm.fn_abi_va_list (cfun->decl);
7242
7243 if (TREE_CODE (vatype) == ARRAY_TYPE)
7244 {
7245 if (TREE_SIDE_EFFECTS (valist))
7246 valist = save_expr (valist);
7247
7248 /* For this case, the backends will be expecting a pointer to
7249 vatype, but it's possible we've actually been given an array
7250 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
7251 So fix it. */
7252 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
7253 {
7254 tree p1 = build_pointer_type (TREE_TYPE (vatype));
7255 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
7256 }
7257 }
7258 else
7259 {
7260 tree pt = build_pointer_type (vatype);
7261
7262 if (! needs_lvalue)
7263 {
7264 if (! TREE_SIDE_EFFECTS (valist))
7265 return valist;
7266
7267 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
7268 TREE_SIDE_EFFECTS (valist) = 1;
7269 }
7270
7271 if (TREE_SIDE_EFFECTS (valist))
7272 valist = save_expr (valist);
7273 valist = fold_build2_loc (loc, MEM_REF,
7274 vatype, valist, build_int_cst (pt, 0));
7275 }
7276
7277 return valist;
7278 }
7279
7280 /* The "standard" definition of va_list is void*. */
7281
7282 tree
7283 std_build_builtin_va_list (void)
7284 {
7285 return ptr_type_node;
7286 }
7287
7288 /* The "standard" abi va_list is va_list_type_node. */
7289
7290 tree
7291 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
7292 {
7293 return va_list_type_node;
7294 }
7295
7296 /* The "standard" type of va_list is va_list_type_node. */
7297
7298 tree
7299 std_canonical_va_list_type (tree type)
7300 {
7301 tree wtype, htype;
7302
7303 wtype = va_list_type_node;
7304 htype = type;
7305
7306 if (TREE_CODE (wtype) == ARRAY_TYPE)
7307 {
7308 /* If va_list is an array type, the argument may have decayed
7309 to a pointer type, e.g. by being passed to another function.
7310 In that case, unwrap both types so that we can compare the
7311 underlying records. */
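      /* For instance, on targets such as x86-64 where __builtin_va_list is
         declared as a one-element array of a record (__va_list_tag[1]), a
         va_list object received as a parameter arrives as __va_list_tag *,
         so both sides are unwrapped down to the record type here.  */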
7312 if (TREE_CODE (htype) == ARRAY_TYPE
7313 || POINTER_TYPE_P (htype))
7314 {
7315 wtype = TREE_TYPE (wtype);
7316 htype = TREE_TYPE (htype);
7317 }
7318 }
7319 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7320 return va_list_type_node;
7321
7322 return NULL_TREE;
7323 }
7324
7325 /* The "standard" implementation of va_start: just assign `nextarg' to
7326 the variable. */
7327
7328 void
7329 std_expand_builtin_va_start (tree valist, rtx nextarg)
7330 {
7331 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
7332 convert_move (va_r, nextarg, 0);
7333 }
7334
7335 /* Expand EXP, a call to __builtin_va_start. */
7336
7337 static rtx
7338 expand_builtin_va_start (tree exp)
7339 {
7340 rtx nextarg;
7341 tree valist;
7342 location_t loc = EXPR_LOCATION (exp);
7343
7344 if (call_expr_nargs (exp) < 2)
7345 {
7346 error_at (loc, "too few arguments to function %<va_start%>");
7347 return const0_rtx;
7348 }
7349
7350 if (fold_builtin_next_arg (exp, true))
7351 return const0_rtx;
7352
7353 nextarg = expand_builtin_next_arg ();
7354 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
7355
7356 if (targetm.expand_builtin_va_start)
7357 targetm.expand_builtin_va_start (valist, nextarg);
7358 else
7359 std_expand_builtin_va_start (valist, nextarg);
7360
7361 return const0_rtx;
7362 }
7363
7364 /* Expand EXP, a call to __builtin_va_end. */
7365
7366 static rtx
7367 expand_builtin_va_end (tree exp)
7368 {
7369 tree valist = CALL_EXPR_ARG (exp, 0);
7370
7371 /* Evaluate for side effects, if needed. I hate macros that don't
7372 do that. */
7373 if (TREE_SIDE_EFFECTS (valist))
7374 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
7375
7376 return const0_rtx;
7377 }
7378
7379 /* Expand EXP, a call to __builtin_va_copy. We do this as a
7380 builtin rather than just as an assignment in stdarg.h because of the
7381 nastiness of array-type va_list types. */
7382
7383 static rtx
7384 expand_builtin_va_copy (tree exp)
7385 {
7386 tree dst, src, t;
7387 location_t loc = EXPR_LOCATION (exp);
7388
7389 dst = CALL_EXPR_ARG (exp, 0);
7390 src = CALL_EXPR_ARG (exp, 1);
7391
7392 dst = stabilize_va_list_loc (loc, dst, 1);
7393 src = stabilize_va_list_loc (loc, src, 0);
7394
7395 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
7396
7397 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
7398 {
7399 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
7400 TREE_SIDE_EFFECTS (t) = 1;
7401 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7402 }
7403 else
7404 {
7405 rtx dstb, srcb, size;
7406
7407 /* Evaluate to pointers. */
7408 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
7409 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
7410 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
7411 NULL_RTX, VOIDmode, EXPAND_NORMAL);
7412
7413 dstb = convert_memory_address (Pmode, dstb);
7414 srcb = convert_memory_address (Pmode, srcb);
7415
7416 /* "Dereference" to BLKmode memories. */
7417 dstb = gen_rtx_MEM (BLKmode, dstb);
7418 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
7419 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7420 srcb = gen_rtx_MEM (BLKmode, srcb);
7421 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
7422 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7423
7424 /* Copy. */
7425 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
7426 }
7427
7428 return const0_rtx;
7429 }
7430
7431 /* Expand a call to one of the builtin functions __builtin_frame_address or
7432 __builtin_return_address. */
7433
7434 static rtx
7435 expand_builtin_frame_address (tree fndecl, tree exp)
7436 {
7437 /* The argument must be a nonnegative integer constant.
7438 It counts the number of frames to scan up the stack.
7439 The value is either the frame pointer value or the return
7440 address saved in that frame. */
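  /* For example, __builtin_return_address (0) yields the return address of
     the current function, whereas __builtin_frame_address (1) asks for the
     caller's frame and, like any nonzero count, triggers the -Wframe-address
     warning below.  */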
7441 if (call_expr_nargs (exp) == 0)
7442 /* Warning about missing arg was already issued. */
7443 return const0_rtx;
7444 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
7445 {
7446 error ("invalid argument to %qD", fndecl);
7447 return const0_rtx;
7448 }
7449 else
7450 {
7451 /* Number of frames to scan up the stack. */
7452 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
7453
7454 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
7455
7456 /* Some ports cannot access arbitrary stack frames. */
7457 if (tem == NULL)
7458 {
7459 warning (0, "unsupported argument to %qD", fndecl);
7460 return const0_rtx;
7461 }
7462
7463 if (count)
7464 {
7465 /* Warn since no effort is made to ensure that any frame
7466 beyond the current one exists or can be safely reached. */
7467 warning (OPT_Wframe_address, "calling %qD with "
7468 "a nonzero argument is unsafe", fndecl);
7469 }
7470
7471 /* For __builtin_frame_address, return what we've got. */
7472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7473 return tem;
7474
7475 if (!REG_P (tem)
7476 && ! CONSTANT_P (tem))
7477 tem = copy_addr_to_reg (tem);
7478 return tem;
7479 }
7480 }
7481
7482 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
7483 failed and the caller should emit a normal call. */
7484
7485 static rtx
7486 expand_builtin_alloca (tree exp)
7487 {
7488 rtx op0;
7489 rtx result;
7490 unsigned int align;
7491 tree fndecl = get_callee_fndecl (exp);
7492 HOST_WIDE_INT max_size;
7493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7494 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
7495 bool valid_arglist
7496 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7497 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
7498 VOID_TYPE)
7499 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
7500 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
7501 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
7502
7503 if (!valid_arglist)
7504 return NULL_RTX;
7505
7506 if ((alloca_for_var
7507 && warn_vla_limit >= HOST_WIDE_INT_MAX
7508 && warn_alloc_size_limit < warn_vla_limit)
7509 || (!alloca_for_var
7510 && warn_alloca_limit >= HOST_WIDE_INT_MAX
7511 && warn_alloc_size_limit < warn_alloca_limit
7512 ))
7513 {
7514 /* -Walloca-larger-than and -Wvla-larger-than settings of
7515 less than HOST_WIDE_INT_MAX override the more general
7516 -Walloc-size-larger-than so unless either of the former
7517 options is smaller than the last one (which would imply
7518 that the call was already checked), check the alloca
7519 arguments for overflow. */
7520 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
7521 int idx[] = { 0, -1 };
7522 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
7523 }
7524
7525 /* Compute the argument. */
7526 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
7527
7528 /* Compute the alignment. */
7529 align = (fcode == BUILT_IN_ALLOCA
7530 ? BIGGEST_ALIGNMENT
7531 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
7532
7533 /* Compute the maximum size. */
7534 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7535 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
7536 : -1);
7537
7538 /* Allocate the desired space. If the allocation stems from the declaration
7539 of a variable-sized object, it cannot accumulate. */
7540 result
7541 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
7542 result = convert_memory_address (ptr_mode, result);
7543
7544 /* Dynamic allocations for variables are recorded during gimplification. */
7545 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
7546 record_dynamic_alloc (exp);
7547
7548 return result;
7549 }
7550
7551 /* Emit a call to __asan_allocas_unpoison for EXP. Add
7552 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
7553 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See
7554 the motivation for this in the comment for handle_builtin_stack_restore. */
7555
7556 static rtx
7557 expand_asan_emit_allocas_unpoison (tree exp)
7558 {
7559 tree arg0 = CALL_EXPR_ARG (exp, 0);
7560 tree arg1 = CALL_EXPR_ARG (exp, 1);
7561 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7562 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7563 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
7564 stack_pointer_rtx, NULL_RTX, 0,
7565 OPTAB_LIB_WIDEN);
7566 off = convert_modes (ptr_mode, Pmode, off, 0);
7567 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
7568 OPTAB_LIB_WIDEN);
7569 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
7570 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
7571 top, ptr_mode, bot, ptr_mode);
7572 return ret;
7573 }
7574
7575 /* Expand a call to bswap builtin in EXP.
7576 Return NULL_RTX if a normal call should be emitted rather than expanding the
7577 function in-line. If convenient, the result should be placed in TARGET.
7578 SUBTARGET may be used as the target for computing one of EXP's operands. */
7579
7580 static rtx
7581 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
7582 rtx subtarget)
7583 {
7584 tree arg;
7585 rtx op0;
7586
7587 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7588 return NULL_RTX;
7589
7590 arg = CALL_EXPR_ARG (exp, 0);
7591 op0 = expand_expr (arg,
7592 subtarget && GET_MODE (subtarget) == target_mode
7593 ? subtarget : NULL_RTX,
7594 target_mode, EXPAND_NORMAL);
7595 if (GET_MODE (op0) != target_mode)
7596 op0 = convert_to_mode (target_mode, op0, 1);
7597
7598 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
7599
7600 gcc_assert (target);
7601
7602 return convert_to_mode (target_mode, target, 1);
7603 }
7604
7605 /* Expand a call to a unary builtin in EXP.
7606 Return NULL_RTX if a normal call should be emitted rather than expanding the
7607 function in-line. If convenient, the result should be placed in TARGET.
7608 SUBTARGET may be used as the target for computing one of EXP's operands. */
7609
7610 static rtx
7611 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
7612 rtx subtarget, optab op_optab)
7613 {
7614 rtx op0;
7615
7616 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7617 return NULL_RTX;
7618
7619 /* Compute the argument. */
7620 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
7621 (subtarget
7622 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
7623 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
7624 VOIDmode, EXPAND_NORMAL);
7625 /* Compute op, into TARGET if possible.
7626 Set TARGET to wherever the result comes back. */
7627 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
7628 op_optab, op0, target, op_optab != clrsb_optab);
7629 gcc_assert (target);
7630
7631 return convert_to_mode (target_mode, target, 0);
7632 }
7633
7634 /* Expand a call to __builtin_expect. We just return our argument
7635 as the builtin_expect semantics should already have been applied by
7636 the tree branch prediction pass. */
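/* For instance, a hypothetical use such as

     if (__builtin_expect (err != 0, 0))
       handle_error ();

   only influenced the earlier branch-probability estimation; by the time we
   get here the call simply expands to its first argument, ERR != 0.  */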
7637
7638 static rtx
7639 expand_builtin_expect (tree exp, rtx target)
7640 {
7641 tree arg;
7642
7643 if (call_expr_nargs (exp) < 2)
7644 return const0_rtx;
7645 arg = CALL_EXPR_ARG (exp, 0);
7646
7647 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7648 /* When guessing was done, the hints should be already stripped away. */
7649 gcc_assert (!flag_guess_branch_prob
7650 || optimize == 0 || seen_error ());
7651 return target;
7652 }
7653
7654 /* Expand a call to __builtin_expect_with_probability. We just return our
7655 argument as the builtin_expect semantics should already have been applied
7656 by the tree branch prediction pass. */
7657
7658 static rtx
7659 expand_builtin_expect_with_probability (tree exp, rtx target)
7660 {
7661 tree arg;
7662
7663 if (call_expr_nargs (exp) < 3)
7664 return const0_rtx;
7665 arg = CALL_EXPR_ARG (exp, 0);
7666
7667 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7668 /* When guessing was done, the hints should be already stripped away. */
7669 gcc_assert (!flag_guess_branch_prob
7670 || optimize == 0 || seen_error ());
7671 return target;
7672 }
7673
7674
7675 /* Expand a call to __builtin_assume_aligned. We just return our first
7676 argument as the builtin_assume_aligned semantics should already have
7677 been applied by CCP. */
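/* For example, a hypothetical use such as

     p = __builtin_assume_aligned (q, 64);

   has already had its alignment information propagated by CCP; here the
   call merely evaluates to its first argument Q.  */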
7678
7679 static rtx
7680 expand_builtin_assume_aligned (tree exp, rtx target)
7681 {
7682 if (call_expr_nargs (exp) < 2)
7683 return const0_rtx;
7684 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
7685 EXPAND_NORMAL);
7686 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
7687 && (call_expr_nargs (exp) < 3
7688 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
7689 return target;
7690 }
7691
7692 void
7693 expand_builtin_trap (void)
7694 {
7695 if (targetm.have_trap ())
7696 {
7697 rtx_insn *insn = emit_insn (targetm.gen_trap ());
7698 /* For trap insns when not accumulating outgoing args force
7699 REG_ARGS_SIZE note to prevent crossjumping of calls with
7700 different args sizes. */
7701 if (!ACCUMULATE_OUTGOING_ARGS)
7702 add_args_size_note (insn, stack_pointer_delta);
7703 }
7704 else
7705 {
7706 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
7707 tree call_expr = build_call_expr (fn, 0);
7708 expand_call (call_expr, NULL_RTX, false);
7709 }
7710
7711 emit_barrier ();
7712 }
7713
7714 /* Expand a call to __builtin_unreachable. We do nothing except emit
7715 a barrier saying that control flow will not pass here.
7716
7717 It is the responsibility of the program being compiled to ensure
7718 that control flow never reaches __builtin_unreachable. */
7719 static void
7720 expand_builtin_unreachable (void)
7721 {
7722 emit_barrier ();
7723 }
7724
7725 /* Expand EXP, a call to fabs, fabsf or fabsl.
7726 Return NULL_RTX if a normal call should be emitted rather than expanding
7727 the function inline. If convenient, the result should be placed
7728 in TARGET. SUBTARGET may be used as the target for computing
7729 the operand. */
7730
7731 static rtx
7732 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7733 {
7734 machine_mode mode;
7735 tree arg;
7736 rtx op0;
7737
7738 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7739 return NULL_RTX;
7740
7741 arg = CALL_EXPR_ARG (exp, 0);
7742 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7743 mode = TYPE_MODE (TREE_TYPE (arg));
7744 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7745 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7746 }
7747
7748 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7749 Return NULL if a normal call should be emitted rather than expanding the
7750 function inline. If convenient, the result should be placed in TARGET.
7751 SUBTARGET may be used as the target for computing the operand. */
7752
7753 static rtx
7754 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7755 {
7756 rtx op0, op1;
7757 tree arg;
7758
7759 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7760 return NULL_RTX;
7761
7762 arg = CALL_EXPR_ARG (exp, 0);
7763 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7764
7765 arg = CALL_EXPR_ARG (exp, 1);
7766 op1 = expand_normal (arg);
7767
7768 return expand_copysign (op0, op1, target);
7769 }
7770
7771 /* Emit a call to __builtin___clear_cache. */
7772
7773 void
7774 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
7775 {
7776 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
7777 BUILTIN_ASM_NAME_PTR
7778 (BUILT_IN_CLEAR_CACHE));
7779
7780 emit_library_call (callee,
7781 LCT_NORMAL, VOIDmode,
7782 convert_memory_address (ptr_mode, begin), ptr_mode,
7783 convert_memory_address (ptr_mode, end), ptr_mode);
7784 }
7785
7786 /* Emit a call to __builtin___clear_cache, unless the target specifies
7787 it as do-nothing. This function can be used by trampoline
7788 finalizers to duplicate the effects of expanding a call to the
7789 clear_cache builtin. */
7790
7791 void
7792 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
7793 {
7794 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
7795 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
7796 {
7797 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7798 return;
7799 }
7800
7801 if (targetm.have_clear_cache ())
7802 {
7803 /* We have a "clear_cache" insn, and it will handle everything. */
7804 class expand_operand ops[2];
7805
7806 create_address_operand (&ops[0], begin);
7807 create_address_operand (&ops[1], end);
7808
7809 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
7810 return;
7811 }
7812 else
7813 {
7814 #ifndef CLEAR_INSN_CACHE
7815 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7816 does nothing. There is no need to call it. Do nothing. */
7817 return;
7818 #endif /* CLEAR_INSN_CACHE */
7819 }
7820
7821 targetm.calls.emit_call_builtin___clear_cache (begin, end);
7822 }
7823
7824 /* Expand a call to __builtin___clear_cache. */
7825
7826 static void
7827 expand_builtin___clear_cache (tree exp)
7828 {
7829 tree begin, end;
7830 rtx begin_rtx, end_rtx;
7831
7832 /* We must not expand to a library call. If we did, any
7833 fallback library function in libgcc that might contain a call to
7834 __builtin___clear_cache() would recurse infinitely. */
7835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7836 {
7837 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7838 return;
7839 }
7840
7841 begin = CALL_EXPR_ARG (exp, 0);
7842 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
7843
7844 end = CALL_EXPR_ARG (exp, 1);
7845 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
7846
7847 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
7848 }
7849
7850 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7851
7852 static rtx
7853 round_trampoline_addr (rtx tramp)
7854 {
7855 rtx temp, addend, mask;
7856
7857 /* If we don't need too much alignment, we'll have been guaranteed
7858 proper alignment by get_trampoline_type. */
7859 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7860 return tramp;
7861
7862 /* Round address up to desired boundary. */
7863 temp = gen_reg_rtx (Pmode);
7864 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7865 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
7866
7867 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7868 temp, 0, OPTAB_LIB_WIDEN);
7869 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7870 temp, 0, OPTAB_LIB_WIDEN);
7871
7872 return tramp;
7873 }
7874
7875 static rtx
7876 expand_builtin_init_trampoline (tree exp, bool onstack)
7877 {
7878 tree t_tramp, t_func, t_chain;
7879 rtx m_tramp, r_tramp, r_chain, tmp;
7880
7881 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
7882 POINTER_TYPE, VOID_TYPE))
7883 return NULL_RTX;
7884
7885 t_tramp = CALL_EXPR_ARG (exp, 0);
7886 t_func = CALL_EXPR_ARG (exp, 1);
7887 t_chain = CALL_EXPR_ARG (exp, 2);
7888
7889 r_tramp = expand_normal (t_tramp);
7890 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
7891 MEM_NOTRAP_P (m_tramp) = 1;
7892
7893 /* If ONSTACK, the TRAMP argument should be the address of a field
7894 within the local function's FRAME decl. Either way, let's see if
7895 we can fill in the MEM_ATTRs for this memory. */
7896 if (TREE_CODE (t_tramp) == ADDR_EXPR)
7897 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
7898
7899 /* Creator of a heap trampoline is responsible for making sure the
7900 address is aligned to at least STACK_BOUNDARY. Normally malloc
7901 will ensure this anyhow. */
7902 tmp = round_trampoline_addr (r_tramp);
7903 if (tmp != r_tramp)
7904 {
7905 m_tramp = change_address (m_tramp, BLKmode, tmp);
7906 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
7907 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
7908 }
7909
7910 /* The FUNC argument should be the address of the nested function.
7911 Extract the actual function decl to pass to the hook. */
7912 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
7913 t_func = TREE_OPERAND (t_func, 0);
7914 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
7915
7916 r_chain = expand_normal (t_chain);
7917
7918 /* Generate insns to initialize the trampoline. */
7919 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
7920
7921 if (onstack)
7922 {
7923 trampolines_created = 1;
7924
7925 if (targetm.calls.custom_function_descriptors != 0)
7926 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
7927 "trampoline generated for nested function %qD", t_func);
7928 }
7929
7930 return const0_rtx;
7931 }
7932
7933 static rtx
7934 expand_builtin_adjust_trampoline (tree exp)
7935 {
7936 rtx tramp;
7937
7938 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7939 return NULL_RTX;
7940
7941 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7942 tramp = round_trampoline_addr (tramp);
7943 if (targetm.calls.trampoline_adjust_address)
7944 tramp = targetm.calls.trampoline_adjust_address (tramp);
7945
7946 return tramp;
7947 }
7948
7949 /* Expand a call to the builtin descriptor initialization routine.
7950 A descriptor is made up of a couple of pointers to the static
7951 chain and the code entry in this order. */
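/* Roughly, a descriptor behaves like

     struct descriptor { void *static_chain; void *code_entry; };

   whose (deliberately misaligned, see expand_builtin_adjust_descriptor)
   address stands in for a function pointer.  */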
7952
7953 static rtx
7954 expand_builtin_init_descriptor (tree exp)
7955 {
7956 tree t_descr, t_func, t_chain;
7957 rtx m_descr, r_descr, r_func, r_chain;
7958
7959 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
7960 VOID_TYPE))
7961 return NULL_RTX;
7962
7963 t_descr = CALL_EXPR_ARG (exp, 0);
7964 t_func = CALL_EXPR_ARG (exp, 1);
7965 t_chain = CALL_EXPR_ARG (exp, 2);
7966
7967 r_descr = expand_normal (t_descr);
7968 m_descr = gen_rtx_MEM (BLKmode, r_descr);
7969 MEM_NOTRAP_P (m_descr) = 1;
7970 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
7971
7972 r_func = expand_normal (t_func);
7973 r_chain = expand_normal (t_chain);
7974
7975 /* Generate insns to initialize the descriptor. */
7976 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
7977 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
7978 POINTER_SIZE / BITS_PER_UNIT), r_func);
7979
7980 return const0_rtx;
7981 }
7982
7983 /* Expand a call to the builtin descriptor adjustment routine. */
7984
7985 static rtx
7986 expand_builtin_adjust_descriptor (tree exp)
7987 {
7988 rtx tramp;
7989
7990 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7991 return NULL_RTX;
7992
7993 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7994
7995 /* Unalign the descriptor to allow runtime identification. */
7996 tramp = plus_constant (ptr_mode, tramp,
7997 targetm.calls.custom_function_descriptors);
7998
7999 return force_operand (tramp, NULL_RTX);
8000 }
8001
8002 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
8003 function. The function first checks whether the back end provides
8004 an insn to implement signbit for the respective mode. If not, it
8005 checks whether the floating point format of the value is such that
8006 the sign bit can be extracted. If that is not the case, error out.
8007 EXP is the expression that is a call to the builtin function; if
8008 convenient, the result should be placed in TARGET. */
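/* As a rough illustration, for IEEE single precision the sign bit is bit 31,
   so when the result mode is at least 32 bits wide the expansion below
   reduces signbit (x) to roughly (BITS & 0x80000000), where BITS is the
   float value viewed as an integer; sign-bit positions past the lowpart
   instead use the shift-and-mask path.  */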
8009 static rtx
8010 expand_builtin_signbit (tree exp, rtx target)
8011 {
8012 const struct real_format *fmt;
8013 scalar_float_mode fmode;
8014 scalar_int_mode rmode, imode;
8015 tree arg;
8016 int word, bitpos;
8017 enum insn_code icode;
8018 rtx temp;
8019 location_t loc = EXPR_LOCATION (exp);
8020
8021 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8022 return NULL_RTX;
8023
8024 arg = CALL_EXPR_ARG (exp, 0);
8025 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
8026 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
8027 fmt = REAL_MODE_FORMAT (fmode);
8028
8029 arg = builtin_save_expr (arg);
8030
8031 /* Expand the argument yielding a RTX expression. */
8032 temp = expand_normal (arg);
8033
8034 /* Check if the back end provides an insn that handles signbit for the
8035 argument's mode. */
8036 icode = optab_handler (signbit_optab, fmode);
8037 if (icode != CODE_FOR_nothing)
8038 {
8039 rtx_insn *last = get_last_insn ();
8040 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8041 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
8042 return target;
8043 delete_insns_since (last);
8044 }
8045
8046 /* For floating point formats without a sign bit, implement signbit
8047 as "ARG < 0.0". */
8048 bitpos = fmt->signbit_ro;
8049 if (bitpos < 0)
8050 {
8051 /* But we can't do this if the format supports signed zero. */
8052 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
8053
8054 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
8055 build_real (TREE_TYPE (arg), dconst0));
8056 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8057 }
8058
8059 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
8060 {
8061 imode = int_mode_for_mode (fmode).require ();
8062 temp = gen_lowpart (imode, temp);
8063 }
8064 else
8065 {
8066 imode = word_mode;
8067 /* Handle targets with different FP word orders. */
8068 if (FLOAT_WORDS_BIG_ENDIAN)
8069 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
8070 else
8071 word = bitpos / BITS_PER_WORD;
8072 temp = operand_subword_force (temp, word, fmode);
8073 bitpos = bitpos % BITS_PER_WORD;
8074 }
8075
8076 /* Force the intermediate word_mode (or narrower) result into a
8077 register. This avoids attempting to create paradoxical SUBREGs
8078 of floating point modes below. */
8079 temp = force_reg (imode, temp);
8080
8081 /* If the bitpos is within the "result mode" lowpart, the operation
8082 can be implemented with a single bitwise AND. Otherwise, we need
8083 a right shift and an AND. */
8084
8085 if (bitpos < GET_MODE_BITSIZE (rmode))
8086 {
8087 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
8088
8089 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
8090 temp = gen_lowpart (rmode, temp);
8091 temp = expand_binop (rmode, and_optab, temp,
8092 immed_wide_int_const (mask, rmode),
8093 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8094 }
8095 else
8096 {
8097 /* Perform a logical right shift to place the signbit in the least
8098 significant bit, then truncate the result to the desired mode
8099 and mask just this bit. */
8100 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
8101 temp = gen_lowpart (rmode, temp);
8102 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
8103 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8104 }
8105
8106 return temp;
8107 }
8108
8109 /* Expand fork or exec calls. TARGET is the desired target of the
8110 call. EXP is the call. FN is the
8111 identifier of the actual function. IGNORE is nonzero if the
8112 value is to be ignored. */
8113
8114 static rtx
8115 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
8116 {
8117 tree id, decl;
8118 tree call;
8119
8120 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8121 {
8122 tree path = CALL_EXPR_ARG (exp, 0);
8123 /* Detect unterminated path. */
8124 if (!check_read_access (exp, path))
8125 return NULL_RTX;
8126
8127 /* Also detect unterminated first argument. */
8128 switch (DECL_FUNCTION_CODE (fn))
8129 {
8130 case BUILT_IN_EXECL:
8131 case BUILT_IN_EXECLE:
8132 case BUILT_IN_EXECLP:
8133 if (!check_read_access (exp, path))
8134 return NULL_RTX;
8135 default:
8136 break;
8137 }
8138 }
8139
8140
8141 /* If we are not profiling, just call the function. */
8142 if (!profile_arc_flag)
8143 return NULL_RTX;
8144
8145 /* Otherwise call the wrapper. This should be equivalent for the rest of
8146 the compiler, so the code does not diverge, and the wrapper may run the
8147 code necessary for keeping the profiling sane. */
8148
8149 switch (DECL_FUNCTION_CODE (fn))
8150 {
8151 case BUILT_IN_FORK:
8152 id = get_identifier ("__gcov_fork");
8153 break;
8154
8155 case BUILT_IN_EXECL:
8156 id = get_identifier ("__gcov_execl");
8157 break;
8158
8159 case BUILT_IN_EXECV:
8160 id = get_identifier ("__gcov_execv");
8161 break;
8162
8163 case BUILT_IN_EXECLP:
8164 id = get_identifier ("__gcov_execlp");
8165 break;
8166
8167 case BUILT_IN_EXECLE:
8168 id = get_identifier ("__gcov_execle");
8169 break;
8170
8171 case BUILT_IN_EXECVP:
8172 id = get_identifier ("__gcov_execvp");
8173 break;
8174
8175 case BUILT_IN_EXECVE:
8176 id = get_identifier ("__gcov_execve");
8177 break;
8178
8179 default:
8180 gcc_unreachable ();
8181 }
8182
8183 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8184 FUNCTION_DECL, id, TREE_TYPE (fn));
8185 DECL_EXTERNAL (decl) = 1;
8186 TREE_PUBLIC (decl) = 1;
8187 DECL_ARTIFICIAL (decl) = 1;
8188 TREE_NOTHROW (decl) = 1;
8189 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8190 DECL_VISIBILITY_SPECIFIED (decl) = 1;
8191 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
8192 return expand_call (call, target, ignore);
8193 }
8194
8195
8196 \f
8197 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
8198 the pointer in these functions is void*, the tree optimizers may remove
8199 casts. The mode computed in expand_builtin isn't reliable either, due
8200 to __sync_bool_compare_and_swap.
8201
8202 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
8203 group of builtins. This gives us log2 of the mode size. */
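/* For example, for __sync_fetch_and_add_4 the difference from the _1 variant
   is 2, so the requested mode is BITS_PER_UNIT << 2 == 32 bits wide, i.e.
   SImode on typical targets.  */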
8204
8205 static inline machine_mode
8206 get_builtin_sync_mode (int fcode_diff)
8207 {
8208 /* The size is not negotiable, so ask not to get BLKmode in return
8209 if the target indicates that a smaller size would be better. */
8210 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
8211 }
8212
8213 /* Expand the memory expression LOC and return the appropriate memory operand
8214 for the builtin_sync operations. */
8215
8216 static rtx
8217 get_builtin_sync_mem (tree loc, machine_mode mode)
8218 {
8219 rtx addr, mem;
8220 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8221 ? TREE_TYPE (TREE_TYPE (loc))
8222 : TREE_TYPE (loc));
8223 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
8224
8225 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
8226 addr = convert_memory_address (addr_mode, addr);
8227
8228 /* Note that we explicitly do not want any alias information for this
8229 memory, so that we kill all other live memories. Otherwise we don't
8230 satisfy the full barrier semantics of the intrinsic. */
8231 mem = gen_rtx_MEM (mode, addr);
8232
8233 set_mem_addr_space (mem, addr_space);
8234
8235 mem = validize_mem (mem);
8236
8237 /* The alignment needs to be at least according to that of the mode. */
8238 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
8239 get_pointer_alignment (loc)));
8240 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
8241 MEM_VOLATILE_P (mem) = 1;
8242
8243 return mem;
8244 }
8245
8246 /* Make sure an argument is in the right mode.
8247 EXP is the tree argument.
8248 MODE is the mode it should be in. */
8249
8250 static rtx
8251 expand_expr_force_mode (tree exp, machine_mode mode)
8252 {
8253 rtx val;
8254 machine_mode old_mode;
8255
8256 if (TREE_CODE (exp) == SSA_NAME
8257 && TYPE_MODE (TREE_TYPE (exp)) != mode)
8258 {
8259 /* Undo argument promotion if possible, as combine might not
8260 be able to do it later due to MEM_VOLATILE_P uses in the
8261 patterns. */
8262 gimple *g = get_gimple_for_ssa_name (exp);
8263 if (g && gimple_assign_cast_p (g))
8264 {
8265 tree rhs = gimple_assign_rhs1 (g);
8266 tree_code code = gimple_assign_rhs_code (g);
8267 if (CONVERT_EXPR_CODE_P (code)
8268 && TYPE_MODE (TREE_TYPE (rhs)) == mode
8269 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
8270 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
8271 && (TYPE_PRECISION (TREE_TYPE (exp))
8272 > TYPE_PRECISION (TREE_TYPE (rhs))))
8273 exp = rhs;
8274 }
8275 }
8276
8277 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
8278 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
8279 of CONST_INTs, where we know the old_mode only from the call argument. */
8280
8281 old_mode = GET_MODE (val);
8282 if (old_mode == VOIDmode)
8283 old_mode = TYPE_MODE (TREE_TYPE (exp));
8284 val = convert_modes (mode, old_mode, val, 1);
8285 return val;
8286 }
8287
8288
8289 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
8290 EXP is the CALL_EXPR. CODE is the rtx code
8291 that corresponds to the arithmetic or logical operation from the name;
8292 an exception here is that NOT actually means NAND. TARGET is an optional
8293 place for us to store the results; AFTER is true for the xxx_and_fetch
8294 form (return the updated value), false for the fetch_and_xxx form. */
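/* E.g. __sync_fetch_and_nand_4 (p, v) arrives here with CODE == NOT and
   AFTER == false, computing *p = ~(*p & v) (the GCC 4.4 and later
   semantics) while returning the original value of *p.  */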
8295
8296 static rtx
8297 expand_builtin_sync_operation (machine_mode mode, tree exp,
8298 enum rtx_code code, bool after,
8299 rtx target)
8300 {
8301 rtx val, mem;
8302 location_t loc = EXPR_LOCATION (exp);
8303
8304 if (code == NOT && warn_sync_nand)
8305 {
8306 tree fndecl = get_callee_fndecl (exp);
8307 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8308
8309 static bool warned_f_a_n, warned_n_a_f;
8310
8311 switch (fcode)
8312 {
8313 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8314 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8315 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8316 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8317 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8318 if (warned_f_a_n)
8319 break;
8320
8321 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
8322 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8323 warned_f_a_n = true;
8324 break;
8325
8326 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8327 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8328 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8329 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8330 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8331 if (warned_n_a_f)
8332 break;
8333
8334 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
8335 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8336 warned_n_a_f = true;
8337 break;
8338
8339 default:
8340 gcc_unreachable ();
8341 }
8342 }
8343
8344 /* Expand the operands. */
8345 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8346 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8347
8348 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
8349 after);
8350 }
8351
8352 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
8353 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
8354 true if this is the boolean form. TARGET is a place for us to store the
8355 results; this is NOT optional if IS_BOOL is true. */
8356
8357 static rtx
8358 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
8359 bool is_bool, rtx target)
8360 {
8361 rtx old_val, new_val, mem;
8362 rtx *pbool, *poval;
8363
8364 /* Expand the operands. */
8365 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8366 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8367 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8368
8369 pbool = poval = NULL;
8370 if (target != const0_rtx)
8371 {
8372 if (is_bool)
8373 pbool = &target;
8374 else
8375 poval = &target;
8376 }
8377 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
8378 false, MEMMODEL_SYNC_SEQ_CST,
8379 MEMMODEL_SYNC_SEQ_CST))
8380 return NULL_RTX;
8381
8382 return target;
8383 }
8384
8385 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8386 general form is actually an atomic exchange, and some targets only
8387 support a reduced form with the second argument being a constant 1.
8388 EXP is the CALL_EXPR; TARGET is an optional place for us to store
8389 the results. */
8390
8391 static rtx
8392 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
8393 rtx target)
8394 {
8395 rtx val, mem;
8396
8397 /* Expand the operands. */
8398 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8399 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8400
8401 return expand_sync_lock_test_and_set (target, mem, val);
8402 }
8403
8404 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8405
8406 static void
8407 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
8408 {
8409 rtx mem;
8410
8411 /* Expand the operands. */
8412 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8413
8414 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
8415 }
8416
8417 /* Given an integer representing an ``enum memmodel'', verify its
8418 correctness and return the memory model enum. */
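/* For example, in __atomic_store_n (p, v, __ATOMIC_RELEASE) the final
   argument reaches this point as the INTEGER_CST 3 (the value of
   __ATOMIC_RELEASE); a non-constant model is conservatively treated as
   MEMMODEL_SEQ_CST below.  */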
8419
8420 static enum memmodel
8421 get_memmodel (tree exp)
8422 {
8423 rtx op;
8424 unsigned HOST_WIDE_INT val;
8425 location_t loc
8426 = expansion_point_location_if_in_system_header (input_location);
8427
8428 /* If the parameter is not a constant, it's a run time value so we'll just
8429 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
8430 if (TREE_CODE (exp) != INTEGER_CST)
8431 return MEMMODEL_SEQ_CST;
8432
8433 op = expand_normal (exp);
8434
8435 val = INTVAL (op);
8436 if (targetm.memmodel_check)
8437 val = targetm.memmodel_check (val);
8438 else if (val & ~MEMMODEL_MASK)
8439 {
8440 warning_at (loc, OPT_Winvalid_memory_model,
8441 "unknown architecture specifier in memory model to builtin");
8442 return MEMMODEL_SEQ_CST;
8443 }
8444
8445 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
8446 if (memmodel_base (val) >= MEMMODEL_LAST)
8447 {
8448 warning_at (loc, OPT_Winvalid_memory_model,
8449 "invalid memory model argument to builtin");
8450 return MEMMODEL_SEQ_CST;
8451 }
8452
8453 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
8454 be conservative and promote consume to acquire. */
8455 if (val == MEMMODEL_CONSUME)
8456 val = MEMMODEL_ACQUIRE;
8457
8458 return (enum memmodel) val;
8459 }
8460
8461 /* Expand the __atomic_exchange intrinsic:
8462 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8463 EXP is the CALL_EXPR.
8464 TARGET is an optional place for us to store the results. */
8465
8466 static rtx
8467 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
8468 {
8469 rtx val, mem;
8470 enum memmodel model;
8471
8472 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8473
8474 if (!flag_inline_atomics)
8475 return NULL_RTX;
8476
8477 /* Expand the operands. */
8478 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8479 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8480
8481 return expand_atomic_exchange (target, mem, val, model);
8482 }
8483
8484 /* Expand the __atomic_compare_exchange intrinsic:
8485 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
8486 TYPE desired, BOOL weak,
8487 enum memmodel success,
8488 enum memmodel failure)
8489 EXP is the CALL_EXPR.
8490 TARGET is an optional place for us to store the results. */
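/* For instance, a hypothetical strong compare-and-swap

     ok = __atomic_compare_exchange_n (&x, &expected, desired, false,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   reaches this point with WEAK equal to false; on failure the value read
   from *OBJECT is copied back into *EXPECTED by the conditional store
   emitted at the end of this function.  */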
8491
8492 static rtx
8493 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
8494 rtx target)
8495 {
8496 rtx expect, desired, mem, oldval;
8497 rtx_code_label *label;
8498 enum memmodel success, failure;
8499 tree weak;
8500 bool is_weak;
8501 location_t loc
8502 = expansion_point_location_if_in_system_header (input_location);
8503
8504 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
8505 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
8506
8507 if (failure > success)
8508 {
8509 warning_at (loc, OPT_Winvalid_memory_model,
8510 "failure memory model cannot be stronger than success "
8511 "memory model for %<__atomic_compare_exchange%>");
8512 success = MEMMODEL_SEQ_CST;
8513 }
8514
8515 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8516 {
8517 warning_at (loc, OPT_Winvalid_memory_model,
8518 "invalid failure memory model for "
8519 "%<__atomic_compare_exchange%>");
8520 failure = MEMMODEL_SEQ_CST;
8521 success = MEMMODEL_SEQ_CST;
8522 }
8523
8524
8525 if (!flag_inline_atomics)
8526 return NULL_RTX;
8527
8528 /* Expand the operands. */
8529 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8530
8531 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
8532 expect = convert_memory_address (Pmode, expect);
8533 expect = gen_rtx_MEM (mode, expect);
8534 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8535
8536 weak = CALL_EXPR_ARG (exp, 3);
8537 is_weak = false;
8538 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
8539 is_weak = true;
8540
8541 if (target == const0_rtx)
8542 target = NULL;
8543
8544 /* Lest the rtl backend create a race condition with an improper store
8545 to memory, always create a new pseudo for OLDVAL. */
8546 oldval = NULL;
8547
8548 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
8549 is_weak, success, failure))
8550 return NULL_RTX;
8551
8552 /* Conditionally store back to EXPECT, lest we create a race condition
8553 with an improper store to memory. */
8554 /* ??? With a rearrangement of atomics at the gimple level, we can handle
8555 the normal case where EXPECT is totally private, i.e. a register. At
8556 which point the store can be unconditional. */
8557 label = gen_label_rtx ();
8558 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
8559 GET_MODE (target), 1, label);
8560 emit_move_insn (expect, oldval);
8561 emit_label (label);
8562
8563 return target;
8564 }
8565
8566 /* Helper function for expand_ifn_atomic_compare_exchange - expand
8567 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
8568 call. The weak parameter must be dropped to match the expected parameter
8569 list and the expected argument changed from value to pointer to memory
8570 slot. */
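/* As a sketch (placeholder names), an internal call

     IFN_ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, flags,
                                  success, failure)

   is rewritten into the library form

     tmp = expected;
     __atomic_compare_exchange_N (ptr, &tmp, desired, success, failure);

   where N is the access size in bytes and TMP is a fresh stack slot, so
   that EXPECTED is passed by address as the library routine requires.  */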
8571
8572 static void
8573 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
8574 {
8575 unsigned int z;
8576 vec<tree, va_gc> *vec;
8577
8578 vec_alloc (vec, 5);
8579 vec->quick_push (gimple_call_arg (call, 0));
8580 tree expected = gimple_call_arg (call, 1);
8581 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
8582 TREE_TYPE (expected));
8583 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
8584 if (expd != x)
8585 emit_move_insn (x, expd);
8586 tree v = make_tree (TREE_TYPE (expected), x);
8587 vec->quick_push (build1 (ADDR_EXPR,
8588 build_pointer_type (TREE_TYPE (expected)), v));
8589 vec->quick_push (gimple_call_arg (call, 2));
8590 /* Skip the boolean weak parameter. */
8591 for (z = 4; z < 6; z++)
8592 vec->quick_push (gimple_call_arg (call, z));
8593 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
8594 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
8595 gcc_assert (bytes_log2 < 5);
8596 built_in_function fncode
8597 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
8598 + bytes_log2);
8599 tree fndecl = builtin_decl_explicit (fncode);
8600 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
8601 fndecl);
8602 tree exp = build_call_vec (boolean_type_node, fn, vec);
8603 tree lhs = gimple_call_lhs (call);
8604 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
8605 if (lhs)
8606 {
8607 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8608 if (GET_MODE (boolret) != mode)
8609 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8610 x = force_reg (mode, x);
8611 write_complex_part (target, boolret, true);
8612 write_complex_part (target, x, false);
8613 }
8614 }
8615
8616 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
8617
8618 void
8619 expand_ifn_atomic_compare_exchange (gcall *call)
8620 {
8621 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
8622 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
8623 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
8624 rtx expect, desired, mem, oldval, boolret;
8625 enum memmodel success, failure;
8626 tree lhs;
8627 bool is_weak;
8628 location_t loc
8629 = expansion_point_location_if_in_system_header (gimple_location (call));
8630
8631 success = get_memmodel (gimple_call_arg (call, 4));
8632 failure = get_memmodel (gimple_call_arg (call, 5));
8633
8634 if (failure > success)
8635 {
8636 warning_at (loc, OPT_Winvalid_memory_model,
8637 "failure memory model cannot be stronger than success "
8638 "memory model for %<__atomic_compare_exchange%>");
8639 success = MEMMODEL_SEQ_CST;
8640 }
8641
8642 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8643 {
8644 warning_at (loc, OPT_Winvalid_memory_model,
8645 "invalid failure memory model for "
8646 "%<__atomic_compare_exchange%>");
8647 failure = MEMMODEL_SEQ_CST;
8648 success = MEMMODEL_SEQ_CST;
8649 }
8650
8651 if (!flag_inline_atomics)
8652 {
8653 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8654 return;
8655 }
8656
8657 /* Expand the operands. */
8658 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
8659
8660 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
8661 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
8662
8663 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
8664
8665 boolret = NULL;
8666 oldval = NULL;
8667
8668 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
8669 is_weak, success, failure))
8670 {
8671 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8672 return;
8673 }
8674
8675 lhs = gimple_call_lhs (call);
8676 if (lhs)
8677 {
8678 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8679 if (GET_MODE (boolret) != mode)
8680 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8681 write_complex_part (target, boolret, true);
8682 write_complex_part (target, oldval, false);
8683 }
8684 }
8685
8686 /* Expand the __atomic_load intrinsic:
8687 TYPE __atomic_load (TYPE *object, enum memmodel)
8688 EXP is the CALL_EXPR.
8689 TARGET is an optional place for us to store the results. */
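/* Illustrative source-level use (a sketch only; PTR is a placeholder):

     int v = __atomic_load_n (ptr, __ATOMIC_ACQUIRE);

   RELEASE and ACQ_REL orders are rejected below since a load cannot have
   release semantics.  */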
8690
8691 static rtx
8692 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
8693 {
8694 rtx mem;
8695 enum memmodel model;
8696
8697 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8698 if (is_mm_release (model) || is_mm_acq_rel (model))
8699 {
8700 location_t loc
8701 = expansion_point_location_if_in_system_header (input_location);
8702 warning_at (loc, OPT_Winvalid_memory_model,
8703 "invalid memory model for %<__atomic_load%>");
8704 model = MEMMODEL_SEQ_CST;
8705 }
8706
8707 if (!flag_inline_atomics)
8708 return NULL_RTX;
8709
8710 /* Expand the operand. */
8711 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8712
8713 return expand_atomic_load (target, mem, model);
8714 }
8715
8716
8717 /* Expand the __atomic_store intrinsic:
8718 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
8719 EXP is the CALL_EXPR.
8720 TARGET is an optional place for us to store the results. */
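/* Illustrative source-level use (a sketch only; PTR and VAL are
   placeholders):

     __atomic_store_n (ptr, val, __ATOMIC_RELEASE);

   Only RELAXED, RELEASE and SEQ_CST are valid for a store, which is what
   the check below enforces.  */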
8721
8722 static rtx
8723 expand_builtin_atomic_store (machine_mode mode, tree exp)
8724 {
8725 rtx mem, val;
8726 enum memmodel model;
8727
8728 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8729 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
8730 || is_mm_release (model)))
8731 {
8732 location_t loc
8733 = expansion_point_location_if_in_system_header (input_location);
8734 warning_at (loc, OPT_Winvalid_memory_model,
8735 "invalid memory model for %<__atomic_store%>");
8736 model = MEMMODEL_SEQ_CST;
8737 }
8738
8739 if (!flag_inline_atomics)
8740 return NULL_RTX;
8741
8742 /* Expand the operands. */
8743 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8744 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8745
8746 return expand_atomic_store (mem, val, model, false);
8747 }
8748
8749 /* Expand the __atomic_fetch_XXX intrinsic:
8750 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8751 EXP is the CALL_EXPR.
8752 TARGET is an optional place for us to store the results.
8753    CODE is the operation: PLUS, MINUS, AND, XOR, IOR or NOT.
8754 FETCH_AFTER is true if returning the result of the operation.
8755 FETCH_AFTER is false if returning the value before the operation.
8756 IGNORE is true if the result is not used.
8757 EXT_CALL is the correct builtin for an external call if this cannot be
8758 resolved to an instruction sequence. */
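/* Illustrative source-level uses (a sketch only; CTR is a placeholder):

     int before = __atomic_fetch_add (&ctr, 1, __ATOMIC_RELAXED);
     int after  = __atomic_add_fetch (&ctr, 1, __ATOMIC_RELAXED);

   The first corresponds to FETCH_AFTER == false, the second to
   FETCH_AFTER == true; when no instruction sequence exists, the
   arithmetic correction at the end of this function recovers the
   after-value from the library call's before-value.  */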
8759
8760 static rtx
8761 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
8762 enum rtx_code code, bool fetch_after,
8763 bool ignore, enum built_in_function ext_call)
8764 {
8765 rtx val, mem, ret;
8766 enum memmodel model;
8767 tree fndecl;
8768 tree addr;
8769
8770 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8771
8772 /* Expand the operands. */
8773 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8774 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8775
8776 /* Only try generating instructions if inlining is turned on. */
8777 if (flag_inline_atomics)
8778 {
8779 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
8780 if (ret)
8781 return ret;
8782 }
8783
8784 /* Return if a different routine isn't needed for the library call. */
8785 if (ext_call == BUILT_IN_NONE)
8786 return NULL_RTX;
8787
8788 /* Change the call to the specified function. */
8789 fndecl = get_callee_fndecl (exp);
8790 addr = CALL_EXPR_FN (exp);
8791 STRIP_NOPS (addr);
8792
8793 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
8794 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
8795
8796 /* If we will emit code after the call, the call cannot be a tail call.
8797 If it is emitted as a tail call, a barrier is emitted after it, and
8798 then all trailing code is removed. */
8799 if (!ignore)
8800 CALL_EXPR_TAILCALL (exp) = 0;
8801
8802 /* Expand the call here so we can emit trailing code. */
8803 ret = expand_call (exp, target, ignore);
8804
8805 /* Replace the original function just in case it matters. */
8806 TREE_OPERAND (addr, 0) = fndecl;
8807
8808 /* Then issue the arithmetic correction to return the right result. */
8809 if (!ignore)
8810 {
8811 if (code == NOT)
8812 {
8813 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
8814 OPTAB_LIB_WIDEN);
8815 ret = expand_simple_unop (mode, NOT, ret, target, true);
8816 }
8817 else
8818 ret = expand_simple_binop (mode, code, ret, val, target, true,
8819 OPTAB_LIB_WIDEN);
8820 }
8821 return ret;
8822 }
8823
8824 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
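/* These internal functions are generated from patterns of the form
   (roughly, with placeholder names)

     __atomic_fetch_or (ptr, 1 << bit, order) & (1 << bit)

   so that a direct bit-test-and-set style instruction can be emitted when
   the target provides one; otherwise the fetch_op fallback below is
   used.  */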
8825
8826 void
8827 expand_ifn_atomic_bit_test_and (gcall *call)
8828 {
8829 tree ptr = gimple_call_arg (call, 0);
8830 tree bit = gimple_call_arg (call, 1);
8831 tree flag = gimple_call_arg (call, 2);
8832 tree lhs = gimple_call_lhs (call);
8833 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
8834 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
8835 enum rtx_code code;
8836 optab optab;
8837 class expand_operand ops[5];
8838
8839 gcc_assert (flag_inline_atomics);
8840
8841 if (gimple_call_num_args (call) == 4)
8842 model = get_memmodel (gimple_call_arg (call, 3));
8843
8844 rtx mem = get_builtin_sync_mem (ptr, mode);
8845 rtx val = expand_expr_force_mode (bit, mode);
8846
8847 switch (gimple_call_internal_fn (call))
8848 {
8849 case IFN_ATOMIC_BIT_TEST_AND_SET:
8850 code = IOR;
8851 optab = atomic_bit_test_and_set_optab;
8852 break;
8853 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
8854 code = XOR;
8855 optab = atomic_bit_test_and_complement_optab;
8856 break;
8857 case IFN_ATOMIC_BIT_TEST_AND_RESET:
8858 code = AND;
8859 optab = atomic_bit_test_and_reset_optab;
8860 break;
8861 default:
8862 gcc_unreachable ();
8863 }
8864
8865 if (lhs == NULL_TREE)
8866 {
8867 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8868 val, NULL_RTX, true, OPTAB_DIRECT);
8869 if (code == AND)
8870 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8871 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
8872 return;
8873 }
8874
8875 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8876 enum insn_code icode = direct_optab_handler (optab, mode);
8877 gcc_assert (icode != CODE_FOR_nothing);
8878 create_output_operand (&ops[0], target, mode);
8879 create_fixed_operand (&ops[1], mem);
8880 create_convert_operand_to (&ops[2], val, mode, true);
8881 create_integer_operand (&ops[3], model);
8882 create_integer_operand (&ops[4], integer_onep (flag));
8883 if (maybe_expand_insn (icode, 5, ops))
8884 return;
8885
8886 rtx bitval = val;
8887 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8888 val, NULL_RTX, true, OPTAB_DIRECT);
8889 rtx maskval = val;
8890 if (code == AND)
8891 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8892 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
8893 code, model, false);
8894 if (integer_onep (flag))
8895 {
8896 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
8897 NULL_RTX, true, OPTAB_DIRECT);
8898 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
8899 true, OPTAB_DIRECT);
8900 }
8901 else
8902 result = expand_simple_binop (mode, AND, result, maskval, target, true,
8903 OPTAB_DIRECT);
8904 if (result != target)
8905 emit_move_insn (target, result);
8906 }
8907
8908 /* Expand an atomic clear operation.
8909    void __atomic_clear (BOOL *obj, enum memmodel)
8910 EXP is the call expression. */
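/* Illustrative source-level use (a sketch only; FLAG is a placeholder):

     __atomic_clear (&flag, __ATOMIC_RELEASE);

   CONSUME, ACQUIRE and ACQ_REL are rejected below because a clear is a
   store.  */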
8911
8912 static rtx
8913 expand_builtin_atomic_clear (tree exp)
8914 {
8915 machine_mode mode;
8916 rtx mem, ret;
8917 enum memmodel model;
8918
8919 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8920 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8921 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8922
8923 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
8924 {
8925 location_t loc
8926 = expansion_point_location_if_in_system_header (input_location);
8927 warning_at (loc, OPT_Winvalid_memory_model,
8928 "invalid memory model for %<__atomic_store%>");
8929 model = MEMMODEL_SEQ_CST;
8930 }
8931
8932 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
8933 Failing that, a store is issued by __atomic_store. The only way this can
8934 fail is if the bool type is larger than a word size. Unlikely, but
8935 handle it anyway for completeness. Assume a single threaded model since
8936 there is no atomic support in this case, and no barriers are required. */
8937 ret = expand_atomic_store (mem, const0_rtx, model, true);
8938 if (!ret)
8939 emit_move_insn (mem, const0_rtx);
8940 return const0_rtx;
8941 }
8942
8943 /* Expand an atomic test_and_set operation.
8944    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
8945 EXP is the call expression. */
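/* Illustrative spin-lock acquire loop (a sketch only; FLAG is a
   placeholder):

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;

   The builtin returns the previous contents of *FLAG.  */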
8946
8947 static rtx
8948 expand_builtin_atomic_test_and_set (tree exp, rtx target)
8949 {
8950 rtx mem;
8951 enum memmodel model;
8952 machine_mode mode;
8953
8954 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8955 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8956 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8957
8958 return expand_atomic_test_and_set (target, mem, model);
8959 }
8960
8961
8962 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
8963 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
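/* Illustrative source-level use (a sketch only):

     if (__atomic_always_lock_free (sizeof (long long), 0))
       ...

   A null second argument asks about an object with the typical alignment
   for that size; the answer is always a compile-time constant.  */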
8964
8965 static tree
8966 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
8967 {
8968 int size;
8969 machine_mode mode;
8970 unsigned int mode_align, type_align;
8971
8972 if (TREE_CODE (arg0) != INTEGER_CST)
8973 return NULL_TREE;
8974
8975 /* We need a corresponding integer mode for the access to be lock-free. */
8976 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
8977 if (!int_mode_for_size (size, 0).exists (&mode))
8978 return boolean_false_node;
8979
8980 mode_align = GET_MODE_ALIGNMENT (mode);
8981
8982 if (TREE_CODE (arg1) == INTEGER_CST)
8983 {
8984 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
8985
8986 /* Either this argument is null, or it's a fake pointer encoding
8987 the alignment of the object. */
8988 val = least_bit_hwi (val);
8989 val *= BITS_PER_UNIT;
8990
8991 if (val == 0 || mode_align < val)
8992 type_align = mode_align;
8993 else
8994 type_align = val;
8995 }
8996 else
8997 {
8998 tree ttype = TREE_TYPE (arg1);
8999
9000 /* This function is usually invoked and folded immediately by the front
9001 end before anything else has a chance to look at it. The pointer
9002 parameter at this point is usually cast to a void *, so check for that
9003 and look past the cast. */
9004 if (CONVERT_EXPR_P (arg1)
9005 && POINTER_TYPE_P (ttype)
9006 && VOID_TYPE_P (TREE_TYPE (ttype))
9007 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
9008 arg1 = TREE_OPERAND (arg1, 0);
9009
9010 ttype = TREE_TYPE (arg1);
9011 gcc_assert (POINTER_TYPE_P (ttype));
9012
9013 /* Get the underlying type of the object. */
9014 ttype = TREE_TYPE (ttype);
9015 type_align = TYPE_ALIGN (ttype);
9016 }
9017
9018 /* If the object has smaller alignment, the lock free routines cannot
9019 be used. */
9020 if (type_align < mode_align)
9021 return boolean_false_node;
9022
9023 /* Check if a compare_and_swap pattern exists for the mode which represents
9024 the required size. The pattern is not allowed to fail, so the existence
9025 of the pattern indicates support is present. Also require that an
9026 atomic load exists for the required size. */
9027 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
9028 return boolean_true_node;
9029 else
9030 return boolean_false_node;
9031 }
9032
9033 /* Return true if the parameters to call EXP represent an object which will
9034 always generate lock free instructions. The first argument represents the
9035 size of the object, and the second parameter is a pointer to the object
9036 itself. If NULL is passed for the object, then the result is based on
9037 typical alignment for an object of the specified size. Otherwise return
9038 false. */
9039
9040 static rtx
9041 expand_builtin_atomic_always_lock_free (tree exp)
9042 {
9043 tree size;
9044 tree arg0 = CALL_EXPR_ARG (exp, 0);
9045 tree arg1 = CALL_EXPR_ARG (exp, 1);
9046
9047 if (TREE_CODE (arg0) != INTEGER_CST)
9048 {
9049 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
9050 return const0_rtx;
9051 }
9052
9053 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
9054 if (size == boolean_true_node)
9055 return const1_rtx;
9056 return const0_rtx;
9057 }
9058
9059 /* Return one or zero if it can be determined that object ARG1 of size ARG0
9060 is lock free on this architecture. */
9061
9062 static tree
9063 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9064 {
9065 if (!flag_inline_atomics)
9066 return NULL_TREE;
9067
9068 /* If it isn't always lock free, don't generate a result. */
9069 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9070 return boolean_true_node;
9071
9072 return NULL_TREE;
9073 }
9074
9075 /* Return true if the parameters to call EXP represent an object which will
9076 always generate lock free instructions. The first argument represents the
9077 size of the object, and the second parameter is a pointer to the object
9078 itself. If NULL is passed for the object, then the result is based on
9079 typical alignment for an object of the specified size. Otherwise return
9080    NULL.  */
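/* Illustrative source-level use (a sketch only; P is a placeholder):

     if (__atomic_is_lock_free (sizeof *p, p))
       ...

   Unlike __atomic_always_lock_free, when the answer is not known at
   compile time this resolves at run time through a libatomic call.  */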
9081
9082 static rtx
9083 expand_builtin_atomic_is_lock_free (tree exp)
9084 {
9085 tree size;
9086 tree arg0 = CALL_EXPR_ARG (exp, 0);
9087 tree arg1 = CALL_EXPR_ARG (exp, 1);
9088
9089 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9090 {
9091 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
9092 return NULL_RTX;
9093 }
9094
9095 if (!flag_inline_atomics)
9096 return NULL_RTX;
9097
9098 /* If the value is known at compile time, return the RTX for it. */
9099 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
9100 if (size == boolean_true_node)
9101 return const1_rtx;
9102
9103 return NULL_RTX;
9104 }
9105
9106 /* Expand the __atomic_thread_fence intrinsic:
9107 void __atomic_thread_fence (enum memmodel)
9108 EXP is the CALL_EXPR. */
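/* Illustrative source-level use (a sketch only):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);

   orders the calling thread's memory accesses against other threads
   according to the given model.  */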
9109
9110 static void
9111 expand_builtin_atomic_thread_fence (tree exp)
9112 {
9113 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9114 expand_mem_thread_fence (model);
9115 }
9116
9117 /* Expand the __atomic_signal_fence intrinsic:
9118 void __atomic_signal_fence (enum memmodel)
9119 EXP is the CALL_EXPR. */
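/* Unlike __atomic_thread_fence, this only orders memory accesses with
   respect to a signal handler running in the same thread, so it normally
   expands to a compiler barrier rather than a machine barrier.  */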
9120
9121 static void
9122 expand_builtin_atomic_signal_fence (tree exp)
9123 {
9124 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9125 expand_mem_signal_fence (model);
9126 }
9127
9128 /* Expand the __sync_synchronize intrinsic. */
9129
9130 static void
9131 expand_builtin_sync_synchronize (void)
9132 {
9133 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
9134 }
9135
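/* Expand a call to __builtin_thread_pointer (void), returning the current
   thread's TLS base pointer through the get_thread_pointer optab, or
   report an error if the target has no such pattern.  */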
9136 static rtx
9137 expand_builtin_thread_pointer (tree exp, rtx target)
9138 {
9139 enum insn_code icode;
9140 if (!validate_arglist (exp, VOID_TYPE))
9141 return const0_rtx;
9142 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9143 if (icode != CODE_FOR_nothing)
9144 {
9145 class expand_operand op;
9146       /* If the target is not suitable then create a new target. */
9147 if (target == NULL_RTX
9148 || !REG_P (target)
9149 || GET_MODE (target) != Pmode)
9150 target = gen_reg_rtx (Pmode);
9151 create_output_operand (&op, target, Pmode);
9152 expand_insn (icode, 1, &op);
9153 return target;
9154 }
9155 error ("%<__builtin_thread_pointer%> is not supported on this target");
9156 return const0_rtx;
9157 }
9158
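/* Expand a call to __builtin_set_thread_pointer (void *), installing its
   argument as the TLS base pointer through the set_thread_pointer optab,
   or report an error if the target has no such pattern.  */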
9159 static void
9160 expand_builtin_set_thread_pointer (tree exp)
9161 {
9162 enum insn_code icode;
9163 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9164 return;
9165 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9166 if (icode != CODE_FOR_nothing)
9167 {
9168 class expand_operand op;
9169 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9170 Pmode, EXPAND_NORMAL);
9171 create_input_operand (&op, val, Pmode);
9172 expand_insn (icode, 1, &op);
9173 return;
9174 }
9175 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
9176 }
9177
9178 \f
9179 /* Emit code to restore the current value of stack. */
9180
9181 static void
9182 expand_stack_restore (tree var)
9183 {
9184 rtx_insn *prev;
9185 rtx sa = expand_normal (var);
9186
9187 sa = convert_memory_address (Pmode, sa);
9188
9189 prev = get_last_insn ();
9190 emit_stack_restore (SAVE_BLOCK, sa);
9191
9192 record_new_stack_level ();
9193
9194 fixup_args_size_notes (prev, get_last_insn (), 0);
9195 }
9196
9197 /* Emit code to save the current value of stack. */
9198
9199 static rtx
9200 expand_stack_save (void)
9201 {
9202 rtx ret = NULL_RTX;
9203
9204 emit_stack_save (SAVE_BLOCK, &ret);
9205 return ret;
9206 }
9207
9208 /* Emit code to get the openacc gang, worker or vector id or size. */
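/* Illustrative uses inside OpenACC offloaded code (a sketch only; the
   constant 0 here stands for GOMP_DIM_GANG):

     int id  = __builtin_goacc_parlevel_id (0);
     int num = __builtin_goacc_parlevel_size (0);
*/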
9209
9210 static rtx
9211 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9212 {
9213 const char *name;
9214 rtx fallback_retval;
9215 rtx_insn *(*gen_fn) (rtx, rtx);
9216 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9217 {
9218 case BUILT_IN_GOACC_PARLEVEL_ID:
9219 name = "__builtin_goacc_parlevel_id";
9220 fallback_retval = const0_rtx;
9221 gen_fn = targetm.gen_oacc_dim_pos;
9222 break;
9223 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9224 name = "__builtin_goacc_parlevel_size";
9225 fallback_retval = const1_rtx;
9226 gen_fn = targetm.gen_oacc_dim_size;
9227 break;
9228 default:
9229 gcc_unreachable ();
9230 }
9231
9232 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9233 {
9234 error ("%qs only supported in OpenACC code", name);
9235 return const0_rtx;
9236 }
9237
9238 tree arg = CALL_EXPR_ARG (exp, 0);
9239 if (TREE_CODE (arg) != INTEGER_CST)
9240 {
9241 error ("non-constant argument 0 to %qs", name);
9242 return const0_rtx;
9243 }
9244
9245 int dim = TREE_INT_CST_LOW (arg);
9246 switch (dim)
9247 {
9248 case GOMP_DIM_GANG:
9249 case GOMP_DIM_WORKER:
9250 case GOMP_DIM_VECTOR:
9251 break;
9252 default:
9253 error ("illegal argument 0 to %qs", name);
9254 return const0_rtx;
9255 }
9256
9257 if (ignore)
9258 return target;
9259
9260 if (target == NULL_RTX)
9261 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9262
9263 if (!targetm.have_oacc_dim_size ())
9264 {
9265 emit_move_insn (target, fallback_retval);
9266 return target;
9267 }
9268
9269 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9270 emit_insn (gen_fn (reg, GEN_INT (dim)));
9271 if (reg != target)
9272 emit_move_insn (target, reg);
9273
9274 return target;
9275 }
9276
9277 /* Expand a string compare operation using a sequence of char comparisons
9278 to get rid of the calling overhead, with result going to TARGET if
9279 that's convenient.
9280
9281 VAR_STR is the variable string source;
9282 CONST_STR is the constant string source;
9283 LENGTH is the number of chars to compare;
9284 CONST_STR_N indicates which source string is the constant string;
9285 IS_MEMCMP indicates whether it's a memcmp or strcmp.
9286
9287 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
9288
9289 target = (int) (unsigned char) var_str[0]
9290 - (int) (unsigned char) const_str[0];
9291 if (target != 0)
9292 goto ne_label;
9293 ...
9294 target = (int) (unsigned char) var_str[length - 2]
9295 - (int) (unsigned char) const_str[length - 2];
9296 if (target != 0)
9297 goto ne_label;
9298 target = (int) (unsigned char) var_str[length - 1]
9299 - (int) (unsigned char) const_str[length - 1];
9300 ne_label:
9301 */
9302
9303 static rtx
9304 inline_string_cmp (rtx target, tree var_str, const char *const_str,
9305 unsigned HOST_WIDE_INT length,
9306 int const_str_n, machine_mode mode)
9307 {
9308 HOST_WIDE_INT offset = 0;
9309 rtx var_rtx_array
9310 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
9311 rtx var_rtx = NULL_RTX;
9312 rtx const_rtx = NULL_RTX;
9313 rtx result = target ? target : gen_reg_rtx (mode);
9314 rtx_code_label *ne_label = gen_label_rtx ();
9315 tree unit_type_node = unsigned_char_type_node;
9316 scalar_int_mode unit_mode
9317 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
9318
9319 start_sequence ();
9320
9321 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
9322 {
9323 var_rtx
9324 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
9325 const_rtx = c_readstr (const_str + offset, unit_mode);
9326 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
9327 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
9328
9329 op0 = convert_modes (mode, unit_mode, op0, 1);
9330 op1 = convert_modes (mode, unit_mode, op1, 1);
9331 result = expand_simple_binop (mode, MINUS, op0, op1,
9332 result, 1, OPTAB_WIDEN);
9333 if (i < length - 1)
9334 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
9335 mode, true, ne_label);
9336 offset += GET_MODE_SIZE (unit_mode);
9337 }
9338
9339 emit_label (ne_label);
9340 rtx_insn *insns = get_insns ();
9341 end_sequence ();
9342 emit_insn (insns);
9343
9344 return result;
9345 }
9346
9347 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
9348 to TARGET if that's convenient.
9349    If the call cannot be inlined, return NULL_RTX.  */
9350
9351 static rtx
9352 inline_expand_builtin_bytecmp (tree exp, rtx target)
9353 {
9354 tree fndecl = get_callee_fndecl (exp);
9355 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9356 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9357
9358 /* Do NOT apply this inlining expansion when optimizing for size or
9359 optimization level below 2. */
9360 if (optimize < 2 || optimize_insn_for_size_p ())
9361 return NULL_RTX;
9362
9363 gcc_checking_assert (fcode == BUILT_IN_STRCMP
9364 || fcode == BUILT_IN_STRNCMP
9365 || fcode == BUILT_IN_MEMCMP);
9366
9367   /* On a target where the type of the call (int) has the same or narrower precision
9368 than unsigned char, give up the inlining expansion. */
9369 if (TYPE_PRECISION (unsigned_char_type_node)
9370 >= TYPE_PRECISION (TREE_TYPE (exp)))
9371 return NULL_RTX;
9372
9373 tree arg1 = CALL_EXPR_ARG (exp, 0);
9374 tree arg2 = CALL_EXPR_ARG (exp, 1);
9375 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9376
9377 unsigned HOST_WIDE_INT len1 = 0;
9378 unsigned HOST_WIDE_INT len2 = 0;
9379 unsigned HOST_WIDE_INT len3 = 0;
9380
9381 /* Get the object representation of the initializers of ARG1 and ARG2
9382 as strings, provided they refer to constant objects, with their byte
9383 sizes in LEN1 and LEN2, respectively. */
9384 const char *bytes1 = getbyterep (arg1, &len1);
9385 const char *bytes2 = getbyterep (arg2, &len2);
9386
9387 /* Fail if neither argument refers to an initialized constant. */
9388 if (!bytes1 && !bytes2)
9389 return NULL_RTX;
9390
9391 if (is_ncmp)
9392 {
9393 /* Fail if the memcmp/strncmp bound is not a constant. */
9394 if (!tree_fits_uhwi_p (len3_tree))
9395 return NULL_RTX;
9396
9397 len3 = tree_to_uhwi (len3_tree);
9398
9399 if (fcode == BUILT_IN_MEMCMP)
9400 {
9401 /* Fail if the memcmp bound is greater than the size of either
9402 of the two constant objects. */
9403 if ((bytes1 && len1 < len3)
9404 || (bytes2 && len2 < len3))
9405 return NULL_RTX;
9406 }
9407 }
9408
9409 if (fcode != BUILT_IN_MEMCMP)
9410 {
9411 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9412 and LEN2 to the length of the nul-terminated string stored
9413 in each. */
9414 if (bytes1 != NULL)
9415 len1 = strnlen (bytes1, len1) + 1;
9416 if (bytes2 != NULL)
9417 len2 = strnlen (bytes2, len2) + 1;
9418 }
9419
9420 /* See inline_string_cmp. */
9421 int const_str_n;
9422 if (!len1)
9423 const_str_n = 2;
9424 else if (!len2)
9425 const_str_n = 1;
9426 else if (len2 > len1)
9427 const_str_n = 1;
9428 else
9429 const_str_n = 2;
9430
9431 /* For strncmp only, compute the new bound as the smallest of
9432 the lengths of the two strings (plus 1) and the bound provided
9433 to the function. */
9434 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9435 if (is_ncmp && len3 < bound)
9436 bound = len3;
9437
9438 /* If the bound of the comparison is larger than the threshold,
9439 do nothing. */
9440 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
9441 return NULL_RTX;
9442
9443 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9444
9445   /* Now, start inline expansion of the call. */
9446 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
9447 (const_str_n == 1) ? bytes1 : bytes2, bound,
9448 const_str_n, mode);
9449 }
9450
9451 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9452 represents the size of the first argument to that call, or VOIDmode
9453 if the argument is a pointer. IGNORE will be true if the result
9454 isn't used. */
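/* Illustrative source-level use (a sketch only; IDX, LIMIT and ARRAY are
   placeholders):

     if (idx < limit)
       val = array[__builtin_speculation_safe_value (idx)];

   keeps IDX from being used under misspeculation past the bounds check on
   targets that implement targetm.speculation_safe_value.  */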
9455 static rtx
9456 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9457 bool ignore)
9458 {
9459 rtx val, failsafe;
9460 unsigned nargs = call_expr_nargs (exp);
9461
9462 tree arg0 = CALL_EXPR_ARG (exp, 0);
9463
9464 if (mode == VOIDmode)
9465 {
9466 mode = TYPE_MODE (TREE_TYPE (arg0));
9467 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9468 }
9469
9470 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9471
9472 /* An optional second argument can be used as a failsafe value on
9473 some machines. If it isn't present, then the failsafe value is
9474 assumed to be 0. */
9475 if (nargs > 1)
9476 {
9477 tree arg1 = CALL_EXPR_ARG (exp, 1);
9478 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9479 }
9480 else
9481 failsafe = const0_rtx;
9482
9483 /* If the result isn't used, the behavior is undefined. It would be
9484 nice to emit a warning here, but path splitting means this might
9485 happen with legitimate code. So simply drop the builtin
9486 expansion in that case; we've handled any side-effects above. */
9487 if (ignore)
9488 return const0_rtx;
9489
9490 /* If we don't have a suitable target, create one to hold the result. */
9491 if (target == NULL || GET_MODE (target) != mode)
9492 target = gen_reg_rtx (mode);
9493
9494 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9495 val = convert_modes (mode, VOIDmode, val, false);
9496
9497 return targetm.speculation_safe_value (mode, target, val, failsafe);
9498 }
9499
9500 /* Expand an expression EXP that calls a built-in function,
9501 with result going to TARGET if that's convenient
9502 (and in mode MODE if that's convenient).
9503 SUBTARGET may be used as the target for computing one of EXP's operands.
9504 IGNORE is nonzero if the value is to be ignored. */
9505
9506 rtx
9507 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
9508 int ignore)
9509 {
9510 tree fndecl = get_callee_fndecl (exp);
9511 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9512 int flags;
9513
9514 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9515 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9516
9517 /* When ASan is enabled, we don't want to expand some memory/string
9518 builtins and rely on libsanitizer's hooks. This allows us to avoid
9519 redundant checks and be sure, that possible overflow will be detected
9520 by ASan. */
9521
9522 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9523 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9524 return expand_call (exp, target, ignore);
9525
9526 /* When not optimizing, generate calls to library functions for a certain
9527 set of builtins. */
9528 if (!optimize
9529 && !called_as_built_in (fndecl)
9530 && fcode != BUILT_IN_FORK
9531 && fcode != BUILT_IN_EXECL
9532 && fcode != BUILT_IN_EXECV
9533 && fcode != BUILT_IN_EXECLP
9534 && fcode != BUILT_IN_EXECLE
9535 && fcode != BUILT_IN_EXECVP
9536 && fcode != BUILT_IN_EXECVE
9537 && fcode != BUILT_IN_CLEAR_CACHE
9538 && !ALLOCA_FUNCTION_CODE_P (fcode)
9539 && fcode != BUILT_IN_FREE)
9540 return expand_call (exp, target, ignore);
9541
9542 /* The built-in function expanders test for target == const0_rtx
9543 to determine whether the function's result will be ignored. */
9544 if (ignore)
9545 target = const0_rtx;
9546
9547 /* If the result of a pure or const built-in function is ignored, and
9548 none of its arguments are volatile, we can avoid expanding the
9549 built-in call and just evaluate the arguments for side-effects. */
9550 if (target == const0_rtx
9551 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9552 && !(flags & ECF_LOOPING_CONST_OR_PURE))
9553 {
9554 bool volatilep = false;
9555 tree arg;
9556 call_expr_arg_iterator iter;
9557
9558 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9559 if (TREE_THIS_VOLATILE (arg))
9560 {
9561 volatilep = true;
9562 break;
9563 }
9564
9565 if (! volatilep)
9566 {
9567 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9568 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
9569 return const0_rtx;
9570 }
9571 }
9572
9573 switch (fcode)
9574 {
9575 CASE_FLT_FN (BUILT_IN_FABS):
9576 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9577 case BUILT_IN_FABSD32:
9578 case BUILT_IN_FABSD64:
9579 case BUILT_IN_FABSD128:
9580 target = expand_builtin_fabs (exp, target, subtarget);
9581 if (target)
9582 return target;
9583 break;
9584
9585 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9586 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
9587 target = expand_builtin_copysign (exp, target, subtarget);
9588 if (target)
9589 return target;
9590 break;
9591
9592 /* Just do a normal library call if we were unable to fold
9593 the values. */
9594 CASE_FLT_FN (BUILT_IN_CABS):
9595 break;
9596
9597 CASE_FLT_FN (BUILT_IN_FMA):
9598 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9599 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9600 if (target)
9601 return target;
9602 break;
9603
9604 CASE_FLT_FN (BUILT_IN_ILOGB):
9605 if (! flag_unsafe_math_optimizations)
9606 break;
9607 gcc_fallthrough ();
9608 CASE_FLT_FN (BUILT_IN_ISINF):
9609 CASE_FLT_FN (BUILT_IN_FINITE):
9610 case BUILT_IN_ISFINITE:
9611 case BUILT_IN_ISNORMAL:
9612 target = expand_builtin_interclass_mathfn (exp, target);
9613 if (target)
9614 return target;
9615 break;
9616
9617 CASE_FLT_FN (BUILT_IN_ICEIL):
9618 CASE_FLT_FN (BUILT_IN_LCEIL):
9619 CASE_FLT_FN (BUILT_IN_LLCEIL):
9620 CASE_FLT_FN (BUILT_IN_LFLOOR):
9621 CASE_FLT_FN (BUILT_IN_IFLOOR):
9622 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9623 target = expand_builtin_int_roundingfn (exp, target);
9624 if (target)
9625 return target;
9626 break;
9627
9628 CASE_FLT_FN (BUILT_IN_IRINT):
9629 CASE_FLT_FN (BUILT_IN_LRINT):
9630 CASE_FLT_FN (BUILT_IN_LLRINT):
9631 CASE_FLT_FN (BUILT_IN_IROUND):
9632 CASE_FLT_FN (BUILT_IN_LROUND):
9633 CASE_FLT_FN (BUILT_IN_LLROUND):
9634 target = expand_builtin_int_roundingfn_2 (exp, target);
9635 if (target)
9636 return target;
9637 break;
9638
9639 CASE_FLT_FN (BUILT_IN_POWI):
9640 target = expand_builtin_powi (exp, target);
9641 if (target)
9642 return target;
9643 break;
9644
9645 CASE_FLT_FN (BUILT_IN_CEXPI):
9646 target = expand_builtin_cexpi (exp, target);
9647 gcc_assert (target);
9648 return target;
9649
9650 CASE_FLT_FN (BUILT_IN_SIN):
9651 CASE_FLT_FN (BUILT_IN_COS):
9652 if (! flag_unsafe_math_optimizations)
9653 break;
9654 target = expand_builtin_mathfn_3 (exp, target, subtarget);
9655 if (target)
9656 return target;
9657 break;
9658
9659 CASE_FLT_FN (BUILT_IN_SINCOS):
9660 if (! flag_unsafe_math_optimizations)
9661 break;
9662 target = expand_builtin_sincos (exp);
9663 if (target)
9664 return target;
9665 break;
9666
9667 case BUILT_IN_APPLY_ARGS:
9668 return expand_builtin_apply_args ();
9669
9670 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9671 FUNCTION with a copy of the parameters described by
9672 ARGUMENTS, and ARGSIZE. It returns a block of memory
9673 allocated on the stack into which is stored all the registers
9674 that might possibly be used for returning the result of a
9675 function. ARGUMENTS is the value returned by
9676 __builtin_apply_args. ARGSIZE is the number of bytes of
9677 arguments that must be copied. ??? How should this value be
9678 computed? We'll also need a safe worst case value for varargs
9679 functions. */
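    /* Illustrative forwarding wrapper (a sketch only; FN is a placeholder
       function pointer and 64 a caller-chosen bound on argument bytes):

         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply (fn, args, 64);
         __builtin_return (ret);
    */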
9680 case BUILT_IN_APPLY:
9681 if (!validate_arglist (exp, POINTER_TYPE,
9682 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
9683 && !validate_arglist (exp, REFERENCE_TYPE,
9684 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9685 return const0_rtx;
9686 else
9687 {
9688 rtx ops[3];
9689
9690 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
9691 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
9692 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
9693
9694 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9695 }
9696
9697 /* __builtin_return (RESULT) causes the function to return the
9698 value described by RESULT. RESULT is address of the block of
9699 memory returned by __builtin_apply. */
9700 case BUILT_IN_RETURN:
9701 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9702 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
9703 return const0_rtx;
9704
9705 case BUILT_IN_SAVEREGS:
9706 return expand_builtin_saveregs ();
9707
9708 case BUILT_IN_VA_ARG_PACK:
9709 /* All valid uses of __builtin_va_arg_pack () are removed during
9710 inlining. */
9711 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9712 return const0_rtx;
9713
9714 case BUILT_IN_VA_ARG_PACK_LEN:
9715 /* All valid uses of __builtin_va_arg_pack_len () are removed during
9716 inlining. */
9717 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
9718 return const0_rtx;
9719
9720 /* Return the address of the first anonymous stack arg. */
9721 case BUILT_IN_NEXT_ARG:
9722 if (fold_builtin_next_arg (exp, false))
9723 return const0_rtx;
9724 return expand_builtin_next_arg ();
9725
9726 case BUILT_IN_CLEAR_CACHE:
9727 expand_builtin___clear_cache (exp);
9728 return const0_rtx;
9729
9730 case BUILT_IN_CLASSIFY_TYPE:
9731 return expand_builtin_classify_type (exp);
9732
9733 case BUILT_IN_CONSTANT_P:
9734 return const0_rtx;
9735
9736 case BUILT_IN_FRAME_ADDRESS:
9737 case BUILT_IN_RETURN_ADDRESS:
9738 return expand_builtin_frame_address (fndecl, exp);
9739
9740 /* Returns the address of the area where the structure is returned.
9741 0 otherwise. */
9742 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9743 if (call_expr_nargs (exp) != 0
9744 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9745 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9746 return const0_rtx;
9747 else
9748 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9749
9750 CASE_BUILT_IN_ALLOCA:
9751 target = expand_builtin_alloca (exp);
9752 if (target)
9753 return target;
9754 break;
9755
9756 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9757 return expand_asan_emit_allocas_unpoison (exp);
9758
9759 case BUILT_IN_STACK_SAVE:
9760 return expand_stack_save ();
9761
9762 case BUILT_IN_STACK_RESTORE:
9763 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
9764 return const0_rtx;
9765
9766 case BUILT_IN_BSWAP16:
9767 case BUILT_IN_BSWAP32:
9768 case BUILT_IN_BSWAP64:
9769 case BUILT_IN_BSWAP128:
9770 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
9771 if (target)
9772 return target;
9773 break;
9774
9775 CASE_INT_FN (BUILT_IN_FFS):
9776 target = expand_builtin_unop (target_mode, exp, target,
9777 subtarget, ffs_optab);
9778 if (target)
9779 return target;
9780 break;
9781
9782 CASE_INT_FN (BUILT_IN_CLZ):
9783 target = expand_builtin_unop (target_mode, exp, target,
9784 subtarget, clz_optab);
9785 if (target)
9786 return target;
9787 break;
9788
9789 CASE_INT_FN (BUILT_IN_CTZ):
9790 target = expand_builtin_unop (target_mode, exp, target,
9791 subtarget, ctz_optab);
9792 if (target)
9793 return target;
9794 break;
9795
9796 CASE_INT_FN (BUILT_IN_CLRSB):
9797 target = expand_builtin_unop (target_mode, exp, target,
9798 subtarget, clrsb_optab);
9799 if (target)
9800 return target;
9801 break;
9802
9803 CASE_INT_FN (BUILT_IN_POPCOUNT):
9804 target = expand_builtin_unop (target_mode, exp, target,
9805 subtarget, popcount_optab);
9806 if (target)
9807 return target;
9808 break;
9809
9810 CASE_INT_FN (BUILT_IN_PARITY):
9811 target = expand_builtin_unop (target_mode, exp, target,
9812 subtarget, parity_optab);
9813 if (target)
9814 return target;
9815 break;
9816
9817 case BUILT_IN_STRLEN:
9818 target = expand_builtin_strlen (exp, target, target_mode);
9819 if (target)
9820 return target;
9821 break;
9822
9823 case BUILT_IN_STRNLEN:
9824 target = expand_builtin_strnlen (exp, target, target_mode);
9825 if (target)
9826 return target;
9827 break;
9828
9829 case BUILT_IN_STRCAT:
9830 target = expand_builtin_strcat (exp);
9831 if (target)
9832 return target;
9833 break;
9834
9835 case BUILT_IN_GETTEXT:
9836 case BUILT_IN_PUTS:
9837 case BUILT_IN_PUTS_UNLOCKED:
9838 case BUILT_IN_STRDUP:
9839 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9840 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9841 break;
9842
9843 case BUILT_IN_INDEX:
9844 case BUILT_IN_RINDEX:
9845 case BUILT_IN_STRCHR:
9846 case BUILT_IN_STRRCHR:
9847 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9848 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9849 break;
9850
9851 case BUILT_IN_FPUTS:
9852 case BUILT_IN_FPUTS_UNLOCKED:
9853 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9854 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9855 break;
9856
9857 case BUILT_IN_STRNDUP:
9858 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9859 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
9860 break;
9861
9862 case BUILT_IN_STRCASECMP:
9863 case BUILT_IN_STRPBRK:
9864 case BUILT_IN_STRSPN:
9865 case BUILT_IN_STRCSPN:
9866 case BUILT_IN_STRSTR:
9867 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9868 {
9869 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9870 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
9871 }
9872 break;
9873
9874 case BUILT_IN_STRCPY:
9875 target = expand_builtin_strcpy (exp, target);
9876 if (target)
9877 return target;
9878 break;
9879
9880 case BUILT_IN_STRNCAT:
9881 target = expand_builtin_strncat (exp, target);
9882 if (target)
9883 return target;
9884 break;
9885
9886 case BUILT_IN_STRNCPY:
9887 target = expand_builtin_strncpy (exp, target);
9888 if (target)
9889 return target;
9890 break;
9891
9892 case BUILT_IN_STPCPY:
9893 target = expand_builtin_stpcpy (exp, target, mode);
9894 if (target)
9895 return target;
9896 break;
9897
9898 case BUILT_IN_STPNCPY:
9899 target = expand_builtin_stpncpy (exp, target);
9900 if (target)
9901 return target;
9902 break;
9903
9904 case BUILT_IN_MEMCHR:
9905 target = expand_builtin_memchr (exp, target);
9906 if (target)
9907 return target;
9908 break;
9909
9910 case BUILT_IN_MEMCPY:
9911 target = expand_builtin_memcpy (exp, target);
9912 if (target)
9913 return target;
9914 break;
9915
9916 case BUILT_IN_MEMMOVE:
9917 target = expand_builtin_memmove (exp, target);
9918 if (target)
9919 return target;
9920 break;
9921
9922 case BUILT_IN_MEMPCPY:
9923 target = expand_builtin_mempcpy (exp, target);
9924 if (target)
9925 return target;
9926 break;
9927
9928 case BUILT_IN_MEMSET:
9929 target = expand_builtin_memset (exp, target, mode);
9930 if (target)
9931 return target;
9932 break;
9933
9934 case BUILT_IN_BZERO:
9935 target = expand_builtin_bzero (exp);
9936 if (target)
9937 return target;
9938 break;
9939
9940 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9941      back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
9942 when changing it to a strcmp call. */
9943 case BUILT_IN_STRCMP_EQ:
9944 target = expand_builtin_memcmp (exp, target, true);
9945 if (target)
9946 return target;
9947
9948 /* Change this call back to a BUILT_IN_STRCMP. */
9949 TREE_OPERAND (exp, 1)
9950 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9951
9952 /* Delete the last parameter. */
9953 unsigned int i;
9954 vec<tree, va_gc> *arg_vec;
9955 vec_alloc (arg_vec, 2);
9956 for (i = 0; i < 2; i++)
9957 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
9958 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
9959 /* FALLTHROUGH */
9960
9961 case BUILT_IN_STRCMP:
9962 target = expand_builtin_strcmp (exp, target);
9963 if (target)
9964 return target;
9965 break;
9966
9967 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9968 back to a BUILT_IN_STRNCMP. */
9969 case BUILT_IN_STRNCMP_EQ:
9970 target = expand_builtin_memcmp (exp, target, true);
9971 if (target)
9972 return target;
9973
9974 /* Change it back to a BUILT_IN_STRNCMP. */
9975 TREE_OPERAND (exp, 1)
9976 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
9977 /* FALLTHROUGH */
9978
9979 case BUILT_IN_STRNCMP:
9980 target = expand_builtin_strncmp (exp, target, mode);
9981 if (target)
9982 return target;
9983 break;
9984
9985 case BUILT_IN_BCMP:
9986 case BUILT_IN_MEMCMP:
9987 case BUILT_IN_MEMCMP_EQ:
9988 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
9989 if (target)
9990 return target;
9991 if (fcode == BUILT_IN_MEMCMP_EQ)
9992 {
9993 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
9994 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
9995 }
9996 break;
9997
9998 case BUILT_IN_SETJMP:
9999 /* This should have been lowered to the builtins below. */
10000 gcc_unreachable ();
10001
10002 case BUILT_IN_SETJMP_SETUP:
10003 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10004 and the receiver label. */
10005 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10006 {
10007 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10008 VOIDmode, EXPAND_NORMAL);
10009 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
10010 rtx_insn *label_r = label_rtx (label);
10011
10012 /* This is copied from the handling of non-local gotos. */
10013 expand_builtin_setjmp_setup (buf_addr, label_r);
10014 nonlocal_goto_handler_labels
10015 = gen_rtx_INSN_LIST (VOIDmode, label_r,
10016 nonlocal_goto_handler_labels);
10017 /* ??? Do not let expand_label treat us as such since we would
10018 not want to be both on the list of non-local labels and on
10019 the list of forced labels. */
10020 FORCED_LABEL (label) = 0;
10021 return const0_rtx;
10022 }
10023 break;
10024
10025 case BUILT_IN_SETJMP_RECEIVER:
10026 /* __builtin_setjmp_receiver is passed the receiver label. */
10027 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10028 {
10029 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
10030 rtx_insn *label_r = label_rtx (label);
10031
10032 expand_builtin_setjmp_receiver (label_r);
10033 return const0_rtx;
10034 }
10035 break;
10036
10037 /* __builtin_longjmp is passed a pointer to an array of five words.
10038 It's similar to the C library longjmp function but works with
10039 __builtin_setjmp above. */
10040 case BUILT_IN_LONGJMP:
10041 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10042 {
10043 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10044 VOIDmode, EXPAND_NORMAL);
10045 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
10046
10047 if (value != const1_rtx)
10048 {
10049 error ("%<__builtin_longjmp%> second argument must be 1");
10050 return const0_rtx;
10051 }
10052
10053 expand_builtin_longjmp (buf_addr, value);
10054 return const0_rtx;
10055 }
10056 break;
10057
10058 case BUILT_IN_NONLOCAL_GOTO:
10059 target = expand_builtin_nonlocal_goto (exp);
10060 if (target)
10061 return target;
10062 break;
10063
10064 /* This updates the setjmp buffer that is its argument with the value
10065 of the current stack pointer. */
10066 case BUILT_IN_UPDATE_SETJMP_BUF:
10067 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10068 {
10069 rtx buf_addr
10070 = expand_normal (CALL_EXPR_ARG (exp, 0));
10071
10072 expand_builtin_update_setjmp_buf (buf_addr);
10073 return const0_rtx;
10074 }
10075 break;
10076
10077 case BUILT_IN_TRAP:
10078 expand_builtin_trap ();
10079 return const0_rtx;
10080
10081 case BUILT_IN_UNREACHABLE:
10082 expand_builtin_unreachable ();
10083 return const0_rtx;
10084
10085 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10086 case BUILT_IN_SIGNBITD32:
10087 case BUILT_IN_SIGNBITD64:
10088 case BUILT_IN_SIGNBITD128:
10089 target = expand_builtin_signbit (exp, target);
10090 if (target)
10091 return target;
10092 break;
10093
10094 /* Various hooks for the DWARF 2 __throw routine. */
10095 case BUILT_IN_UNWIND_INIT:
10096 expand_builtin_unwind_init ();
10097 return const0_rtx;
10098 case BUILT_IN_DWARF_CFA:
10099 return virtual_cfa_rtx;
10100 #ifdef DWARF2_UNWIND_INFO
10101 case BUILT_IN_DWARF_SP_COLUMN:
10102 return expand_builtin_dwarf_sp_column ();
10103 case BUILT_IN_INIT_DWARF_REG_SIZES:
10104 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
10105 return const0_rtx;
10106 #endif
10107 case BUILT_IN_FROB_RETURN_ADDR:
10108 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
10109 case BUILT_IN_EXTRACT_RETURN_ADDR:
10110 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
10111 case BUILT_IN_EH_RETURN:
10112 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10113 CALL_EXPR_ARG (exp, 1));
10114 return const0_rtx;
10115 case BUILT_IN_EH_RETURN_DATA_REGNO:
10116 return expand_builtin_eh_return_data_regno (exp);
10117 case BUILT_IN_EXTEND_POINTER:
10118 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
10119 case BUILT_IN_EH_POINTER:
10120 return expand_builtin_eh_pointer (exp);
10121 case BUILT_IN_EH_FILTER:
10122 return expand_builtin_eh_filter (exp);
10123 case BUILT_IN_EH_COPY_VALUES:
10124 return expand_builtin_eh_copy_values (exp);
10125
10126 case BUILT_IN_VA_START:
10127 return expand_builtin_va_start (exp);
10128 case BUILT_IN_VA_END:
10129 return expand_builtin_va_end (exp);
10130 case BUILT_IN_VA_COPY:
10131 return expand_builtin_va_copy (exp);
10132 case BUILT_IN_EXPECT:
10133 return expand_builtin_expect (exp, target);
10134 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10135 return expand_builtin_expect_with_probability (exp, target);
10136 case BUILT_IN_ASSUME_ALIGNED:
10137 return expand_builtin_assume_aligned (exp, target);
10138 case BUILT_IN_PREFETCH:
10139 expand_builtin_prefetch (exp);
10140 return const0_rtx;
10141
10142 case BUILT_IN_INIT_TRAMPOLINE:
10143 return expand_builtin_init_trampoline (exp, true);
10144 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10145 return expand_builtin_init_trampoline (exp, false);
10146 case BUILT_IN_ADJUST_TRAMPOLINE:
10147 return expand_builtin_adjust_trampoline (exp);
10148
10149 case BUILT_IN_INIT_DESCRIPTOR:
10150 return expand_builtin_init_descriptor (exp);
10151 case BUILT_IN_ADJUST_DESCRIPTOR:
10152 return expand_builtin_adjust_descriptor (exp);
10153
10154 case BUILT_IN_FORK:
10155 case BUILT_IN_EXECL:
10156 case BUILT_IN_EXECV:
10157 case BUILT_IN_EXECLP:
10158 case BUILT_IN_EXECLE:
10159 case BUILT_IN_EXECVP:
10160 case BUILT_IN_EXECVE:
10161 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
10162 if (target)
10163 return target;
10164 break;
10165
10166 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10167 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10168 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10169 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10170 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10171 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
10172 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
10173 if (target)
10174 return target;
10175 break;
10176
10177 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10178 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10179 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10180 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10181 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10182 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
10183 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
10184 if (target)
10185 return target;
10186 break;
10187
10188 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10189 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10190 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10191 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10192 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10193 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
10194 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
10195 if (target)
10196 return target;
10197 break;
10198
10199 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10200 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10201 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10202 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10203 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10204 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
10205 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
10206 if (target)
10207 return target;
10208 break;
10209
10210 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10211 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10212 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10213 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10214 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10215 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
10216 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
10217 if (target)
10218 return target;
10219 break;
10220
10221 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10222 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10223 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10224 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10225 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
10226 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
10227 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
10228 if (target)
10229 return target;
10230 break;
10231
10232 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10233 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10234 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10235 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10236 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10237 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
10238 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
10239 if (target)
10240 return target;
10241 break;
10242
10243 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10244 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10245 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10246 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10247 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10248 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
10249 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
10250 if (target)
10251 return target;
10252 break;
10253
10254 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10255 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10256 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10257 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10258 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10259 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
10260 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
10261 if (target)
10262 return target;
10263 break;
10264
10265 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10266 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10267 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10268 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10269 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10270 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
10271 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
10272 if (target)
10273 return target;
10274 break;
10275
10276 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10277 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10278 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10279 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10280 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10281 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
10282 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
10283 if (target)
10284 return target;
10285 break;
10286
10287 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10288 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10289 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10290 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10291 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10292 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
10293 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
10294 if (target)
10295 return target;
10296 break;
10297
10298 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10299 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10300 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10301 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10302 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
10303 if (mode == VOIDmode)
10304 mode = TYPE_MODE (boolean_type_node);
10305 if (!target || !register_operand (target, mode))
10306 target = gen_reg_rtx (mode);
10307
10308 mode = get_builtin_sync_mode
10309 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
10310 target = expand_builtin_compare_and_swap (mode, exp, true, target);
10311 if (target)
10312 return target;
10313 break;
10314
10315 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10316 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10317 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10318 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10319 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10320 mode = get_builtin_sync_mode
10321 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
10322 target = expand_builtin_compare_and_swap (mode, exp, false, target);
10323 if (target)
10324 return target;
10325 break;
10326
10327 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10328 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10329 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10330 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10331 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10332 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10333 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
10334 if (target)
10335 return target;
10336 break;
10337
10338 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10339 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10340 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10341 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10342 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10343 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10344 expand_builtin_sync_lock_release (mode, exp);
10345 return const0_rtx;
10346
10347 case BUILT_IN_SYNC_SYNCHRONIZE:
10348 expand_builtin_sync_synchronize ();
10349 return const0_rtx;
10350
10351 case BUILT_IN_ATOMIC_EXCHANGE_1:
10352 case BUILT_IN_ATOMIC_EXCHANGE_2:
10353 case BUILT_IN_ATOMIC_EXCHANGE_4:
10354 case BUILT_IN_ATOMIC_EXCHANGE_8:
10355 case BUILT_IN_ATOMIC_EXCHANGE_16:
10356 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10357 target = expand_builtin_atomic_exchange (mode, exp, target);
10358 if (target)
10359 return target;
10360 break;
10361
10362 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10363 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10364 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10365 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10366 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
10367 {
10368 unsigned int nargs, z;
10369 vec<tree, va_gc> *vec;
10370
10371 mode =
10372 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10373 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10374 if (target)
10375 return target;
10376
10377 /* If this is turned into an external library call, the weak parameter
10378 must be dropped to match the expected parameter list. */
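	/* For illustration (not from the original source): the builtin form
	   takes (ptr, expected, desired, weak, success_order, failure_order),
	   while the external __atomic_compare_exchange_N library routine takes
	   (ptr, expected, desired, success_order, failure_order), so argument 3
	   (the weak flag) is skipped when rebuilding the call below.  */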
10379 nargs = call_expr_nargs (exp);
10380 vec_alloc (vec, nargs - 1);
10381 for (z = 0; z < 3; z++)
10382 vec->quick_push (CALL_EXPR_ARG (exp, z));
10383 /* Skip the boolean weak parameter. */
10384 for (z = 4; z < 6; z++)
10385 vec->quick_push (CALL_EXPR_ARG (exp, z));
10386 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10387 break;
10388 }
10389
10390 case BUILT_IN_ATOMIC_LOAD_1:
10391 case BUILT_IN_ATOMIC_LOAD_2:
10392 case BUILT_IN_ATOMIC_LOAD_4:
10393 case BUILT_IN_ATOMIC_LOAD_8:
10394 case BUILT_IN_ATOMIC_LOAD_16:
10395 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10396 target = expand_builtin_atomic_load (mode, exp, target);
10397 if (target)
10398 return target;
10399 break;
10400
10401 case BUILT_IN_ATOMIC_STORE_1:
10402 case BUILT_IN_ATOMIC_STORE_2:
10403 case BUILT_IN_ATOMIC_STORE_4:
10404 case BUILT_IN_ATOMIC_STORE_8:
10405 case BUILT_IN_ATOMIC_STORE_16:
10406 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10407 target = expand_builtin_atomic_store (mode, exp);
10408 if (target)
10409 return const0_rtx;
10410 break;
10411
10412 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10413 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10414 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10415 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10416 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10417 {
10418 enum built_in_function lib;
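	/* Explanatory note (not from the original source): the offset
	   arithmetic below maps each __atomic_add_fetch size variant onto the
	   corresponding BUILT_IN_ATOMIC_FETCH_ADD_{1,2,4,8,16} code, which is
	   passed to the expander as the library-call fallback.  */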
10419 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10420 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10421 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10422 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10423 ignore, lib);
10424 if (target)
10425 return target;
10426 break;
10427 }
10428 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10429 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10430 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10431 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10432 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10433 {
10434 enum built_in_function lib;
10435 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10436 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10437 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10438 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10439 ignore, lib);
10440 if (target)
10441 return target;
10442 break;
10443 }
10444 case BUILT_IN_ATOMIC_AND_FETCH_1:
10445 case BUILT_IN_ATOMIC_AND_FETCH_2:
10446 case BUILT_IN_ATOMIC_AND_FETCH_4:
10447 case BUILT_IN_ATOMIC_AND_FETCH_8:
10448 case BUILT_IN_ATOMIC_AND_FETCH_16:
10449 {
10450 enum built_in_function lib;
10451 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10452 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10453 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10454 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10455 ignore, lib);
10456 if (target)
10457 return target;
10458 break;
10459 }
10460 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10461 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10462 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10463 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10464 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10465 {
10466 enum built_in_function lib;
10467 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10468 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10469 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10470 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10471 ignore, lib);
10472 if (target)
10473 return target;
10474 break;
10475 }
10476 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10477 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10478 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10479 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10480 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10481 {
10482 enum built_in_function lib;
10483 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10484 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10485 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10486 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10487 ignore, lib);
10488 if (target)
10489 return target;
10490 break;
10491 }
10492 case BUILT_IN_ATOMIC_OR_FETCH_1:
10493 case BUILT_IN_ATOMIC_OR_FETCH_2:
10494 case BUILT_IN_ATOMIC_OR_FETCH_4:
10495 case BUILT_IN_ATOMIC_OR_FETCH_8:
10496 case BUILT_IN_ATOMIC_OR_FETCH_16:
10497 {
10498 enum built_in_function lib;
10499 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10500 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10501 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10502 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10503 ignore, lib);
10504 if (target)
10505 return target;
10506 break;
10507 }
10508 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10509 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10510 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10511 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10512 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10513 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10514 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10515 ignore, BUILT_IN_NONE);
10516 if (target)
10517 return target;
10518 break;
10519
10520 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10521 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10522 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10523 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10524 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10525 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10526 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10527 ignore, BUILT_IN_NONE);
10528 if (target)
10529 return target;
10530 break;
10531
10532 case BUILT_IN_ATOMIC_FETCH_AND_1:
10533 case BUILT_IN_ATOMIC_FETCH_AND_2:
10534 case BUILT_IN_ATOMIC_FETCH_AND_4:
10535 case BUILT_IN_ATOMIC_FETCH_AND_8:
10536 case BUILT_IN_ATOMIC_FETCH_AND_16:
10537 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10538 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10539 ignore, BUILT_IN_NONE);
10540 if (target)
10541 return target;
10542 break;
10543
10544 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10545 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10546 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10547 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10548 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10549 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10550 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10551 ignore, BUILT_IN_NONE);
10552 if (target)
10553 return target;
10554 break;
10555
10556 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10557 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10558 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10559 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10560 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10561 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10562 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10563 ignore, BUILT_IN_NONE);
10564 if (target)
10565 return target;
10566 break;
10567
10568 case BUILT_IN_ATOMIC_FETCH_OR_1:
10569 case BUILT_IN_ATOMIC_FETCH_OR_2:
10570 case BUILT_IN_ATOMIC_FETCH_OR_4:
10571 case BUILT_IN_ATOMIC_FETCH_OR_8:
10572 case BUILT_IN_ATOMIC_FETCH_OR_16:
10573 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10574 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10575 ignore, BUILT_IN_NONE);
10576 if (target)
10577 return target;
10578 break;
10579
10580 case BUILT_IN_ATOMIC_TEST_AND_SET:
10581 return expand_builtin_atomic_test_and_set (exp, target);
10582
10583 case BUILT_IN_ATOMIC_CLEAR:
10584 return expand_builtin_atomic_clear (exp);
10585
10586 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10587 return expand_builtin_atomic_always_lock_free (exp);
10588
10589 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10590 target = expand_builtin_atomic_is_lock_free (exp);
10591 if (target)
10592 return target;
10593 break;
10594
10595 case BUILT_IN_ATOMIC_THREAD_FENCE:
10596 expand_builtin_atomic_thread_fence (exp);
10597 return const0_rtx;
10598
10599 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10600 expand_builtin_atomic_signal_fence (exp);
10601 return const0_rtx;
10602
10603 case BUILT_IN_OBJECT_SIZE:
10604 return expand_builtin_object_size (exp);
10605
10606 case BUILT_IN_MEMCPY_CHK:
10607 case BUILT_IN_MEMPCPY_CHK:
10608 case BUILT_IN_MEMMOVE_CHK:
10609 case BUILT_IN_MEMSET_CHK:
10610 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10611 if (target)
10612 return target;
10613 break;
10614
10615 case BUILT_IN_STRCPY_CHK:
10616 case BUILT_IN_STPCPY_CHK:
10617 case BUILT_IN_STRNCPY_CHK:
10618 case BUILT_IN_STPNCPY_CHK:
10619 case BUILT_IN_STRCAT_CHK:
10620 case BUILT_IN_STRNCAT_CHK:
10621 case BUILT_IN_SNPRINTF_CHK:
10622 case BUILT_IN_VSNPRINTF_CHK:
10623 maybe_emit_chk_warning (exp, fcode);
10624 break;
10625
10626 case BUILT_IN_SPRINTF_CHK:
10627 case BUILT_IN_VSPRINTF_CHK:
10628 maybe_emit_sprintf_chk_warning (exp, fcode);
10629 break;
10630
10631 case BUILT_IN_THREAD_POINTER:
10632 return expand_builtin_thread_pointer (exp, target);
10633
10634 case BUILT_IN_SET_THREAD_POINTER:
10635 expand_builtin_set_thread_pointer (exp);
10636 return const0_rtx;
10637
10638 case BUILT_IN_ACC_ON_DEVICE:
10639       /* Fall back to a library call if we failed to expand the builtin
10640	 when folding.  */
10641 break;
10642
10643 case BUILT_IN_GOACC_PARLEVEL_ID:
10644 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10645 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10646
10647 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10648 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10649
10650 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10651 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10652 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10653 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10654 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10655 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10656 return expand_speculation_safe_value (mode, exp, target, ignore);
10657
10658     default:	/* Just do a library call if the builtin is unknown.  */
10659 break;
10660 }
10661
10662 /* The switch statement above can drop through to cause the function
10663 to be called normally. */
10664 return expand_call (exp, target, ignore);
10665 }
10666
10667 /* Determine whether a tree node represents a call to a built-in
10668 function. If the tree T is a call to a built-in function with
10669 the right number of arguments of the appropriate types, return
10670 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10671 Otherwise the return value is END_BUILTINS. */
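/* For example (illustrative, not from the original source): a well-typed
   call sqrt (2.0) yields BUILT_IN_SQRT, whereas a call whose argument types
   do not match the builtin's prototype (say, a pointer where a double is
   expected) yields END_BUILTINS.  */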
10672
10673 enum built_in_function
10674 builtin_mathfn_code (const_tree t)
10675 {
10676 const_tree fndecl, arg, parmlist;
10677 const_tree argtype, parmtype;
10678 const_call_expr_arg_iterator iter;
10679
10680 if (TREE_CODE (t) != CALL_EXPR)
10681 return END_BUILTINS;
10682
10683 fndecl = get_callee_fndecl (t);
10684 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10685 return END_BUILTINS;
10686
10687 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
10688 init_const_call_expr_arg_iterator (t, &iter);
10689 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
10690 {
10691 /* If a function doesn't take a variable number of arguments,
10692 the last element in the list will have type `void'. */
10693 parmtype = TREE_VALUE (parmlist);
10694 if (VOID_TYPE_P (parmtype))
10695 {
10696 if (more_const_call_expr_args_p (&iter))
10697 return END_BUILTINS;
10698 return DECL_FUNCTION_CODE (fndecl);
10699 }
10700
10701 if (! more_const_call_expr_args_p (&iter))
10702 return END_BUILTINS;
10703
10704 arg = next_const_call_expr_arg (&iter);
10705 argtype = TREE_TYPE (arg);
10706
10707 if (SCALAR_FLOAT_TYPE_P (parmtype))
10708 {
10709 if (! SCALAR_FLOAT_TYPE_P (argtype))
10710 return END_BUILTINS;
10711 }
10712 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10713 {
10714 if (! COMPLEX_FLOAT_TYPE_P (argtype))
10715 return END_BUILTINS;
10716 }
10717 else if (POINTER_TYPE_P (parmtype))
10718 {
10719 if (! POINTER_TYPE_P (argtype))
10720 return END_BUILTINS;
10721 }
10722 else if (INTEGRAL_TYPE_P (parmtype))
10723 {
10724 if (! INTEGRAL_TYPE_P (argtype))
10725 return END_BUILTINS;
10726 }
10727 else
10728 return END_BUILTINS;
10729 }
10730
10731 /* Variable-length argument list. */
10732 return DECL_FUNCTION_CODE (fndecl);
10733 }
10734
10735 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
10736 evaluate to a constant. */
10737
10738 static tree
10739 fold_builtin_constant_p (tree arg)
10740 {
10741 /* We return 1 for a numeric type that's known to be a constant
10742 value at compile-time or for an aggregate type that's a
10743 literal constant. */
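  /* Illustrative examples (not from the original source): under the rules
     below, __builtin_constant_p (3) and __builtin_constant_p ("abc") fold
     to 1, a pointer-typed variable argument folds to 0, and a plain integer
     variable yields NULL_TREE so later passes may still prove it constant.  */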
10744 STRIP_NOPS (arg);
10745
10746   /* If we know this is a constant, return the constant one.  */
10747 if (CONSTANT_CLASS_P (arg)
10748 || (TREE_CODE (arg) == CONSTRUCTOR
10749 && TREE_CONSTANT (arg)))
10750 return integer_one_node;
10751 if (TREE_CODE (arg) == ADDR_EXPR)
10752 {
10753 tree op = TREE_OPERAND (arg, 0);
10754 if (TREE_CODE (op) == STRING_CST
10755 || (TREE_CODE (op) == ARRAY_REF
10756 && integer_zerop (TREE_OPERAND (op, 1))
10757 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10758 return integer_one_node;
10759 }
10760
10761 /* If this expression has side effects, show we don't know it to be a
10762      constant.  Likewise if it's a pointer or aggregate type, since in
10763      those cases we only want literals; those are only optimized
10764      when generating RTL, not later.
10765 And finally, if we are compiling an initializer, not code, we
10766 need to return a definite result now; there's not going to be any
10767 more optimization done. */
10768 if (TREE_SIDE_EFFECTS (arg)
10769 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10770 || POINTER_TYPE_P (TREE_TYPE (arg))
10771 || cfun == 0
10772 || folding_initializer
10773 || force_folding_builtin_constant_p)
10774 return integer_zero_node;
10775
10776 return NULL_TREE;
10777 }
10778
10779 /* Create builtin_expect or builtin_expect_with_probability
10780 with PRED and EXPECTED as its arguments and return it as a truthvalue.
10781    The Fortran FE can also produce builtin_expect with PREDICTOR as its third
10782    argument; builtin_expect_with_probability instead uses the third argument
10783    as a PROBABILITY value.  */
10784
10785 static tree
10786 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10787 tree predictor, tree probability)
10788 {
10789 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10790
10791 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10792 : BUILT_IN_EXPECT_WITH_PROBABILITY);
10793 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10794 ret_type = TREE_TYPE (TREE_TYPE (fn));
10795 pred_type = TREE_VALUE (arg_types);
10796 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10797
10798 pred = fold_convert_loc (loc, pred_type, pred);
10799 expected = fold_convert_loc (loc, expected_type, expected);
10800
10801 if (probability)
10802 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10803 else
10804 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10805 predictor);
10806
10807 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10808 build_int_cst (ret_type, 0));
10809 }
10810
10811 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
10812 NULL_TREE if no simplification is possible. */
10813
10814 tree
10815 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10816 tree arg3)
10817 {
10818 tree inner, fndecl, inner_arg0;
10819 enum tree_code code;
10820
10821 /* Distribute the expected value over short-circuiting operators.
10822 See through the cast from truthvalue_type_node to long. */
10823 inner_arg0 = arg0;
10824 while (CONVERT_EXPR_P (inner_arg0)
10825 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10826 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10827 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10828
10829 /* If this is a builtin_expect within a builtin_expect keep the
10830 inner one. See through a comparison against a constant. It
10831      might have been added to create a truthvalue.  */
10832 inner = inner_arg0;
10833
10834 if (COMPARISON_CLASS_P (inner)
10835 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10836 inner = TREE_OPERAND (inner, 0);
10837
10838 if (TREE_CODE (inner) == CALL_EXPR
10839 && (fndecl = get_callee_fndecl (inner))
10840 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10841 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10842 return arg0;
10843
10844 inner = inner_arg0;
10845 code = TREE_CODE (inner);
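  /* For example (illustrative, not from the original source),
     __builtin_expect (a && b, 1) is distributed below into roughly
       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
     converted back to the type of ARG0, so the hint reaches both
     sub-conditions.  */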
10846 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10847 {
10848 tree op0 = TREE_OPERAND (inner, 0);
10849 tree op1 = TREE_OPERAND (inner, 1);
10850 arg1 = save_expr (arg1);
10851
10852 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10853 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10854 inner = build2 (code, TREE_TYPE (inner), op0, op1);
10855
10856 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10857 }
10858
10859 /* If the argument isn't invariant then there's nothing else we can do. */
10860 if (!TREE_CONSTANT (inner_arg0))
10861 return NULL_TREE;
10862
10863 /* If we expect that a comparison against the argument will fold to
10864 a constant return the constant. In practice, this means a true
10865 constant or the address of a non-weak symbol. */
10866 inner = inner_arg0;
10867 STRIP_NOPS (inner);
10868 if (TREE_CODE (inner) == ADDR_EXPR)
10869 {
10870 do
10871 {
10872 inner = TREE_OPERAND (inner, 0);
10873 }
10874 while (TREE_CODE (inner) == COMPONENT_REF
10875 || TREE_CODE (inner) == ARRAY_REF);
10876 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10877 return NULL_TREE;
10878 }
10879
10880 /* Otherwise, ARG0 already has the proper type for the return value. */
10881 return arg0;
10882 }
10883
10884 /* Fold a call to __builtin_classify_type with argument ARG. */
10885
10886 static tree
10887 fold_builtin_classify_type (tree arg)
10888 {
10889 if (arg == 0)
10890 return build_int_cst (integer_type_node, no_type_class);
10891
10892 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10893 }
10894
10895 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10896 ARG. */
10897
10898 static tree
10899 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10900 {
10901 if (!validate_arg (arg, POINTER_TYPE))
10902 return NULL_TREE;
10903 else
10904 {
10905 c_strlen_data lendata = { };
10906 tree len = c_strlen (arg, 0, &lendata);
10907
10908 if (len)
10909 return fold_convert_loc (loc, type, len);
10910
10911 if (!lendata.decl)
10912 c_strlen (arg, 1, &lendata);
10913
10914 if (lendata.decl)
10915 {
10916 if (EXPR_HAS_LOCATION (arg))
10917 loc = EXPR_LOCATION (arg);
10918 else if (loc == UNKNOWN_LOCATION)
10919 loc = input_location;
10920 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
10921 }
10922
10923 return NULL_TREE;
10924 }
10925 }
10926
10927 /* Fold a call to __builtin_inf or __builtin_huge_val. */
10928
10929 static tree
10930 fold_builtin_inf (location_t loc, tree type, int warn)
10931 {
10932 REAL_VALUE_TYPE real;
10933
10934 /* __builtin_inff is intended to be usable to define INFINITY on all
10935 targets. If an infinity is not available, INFINITY expands "to a
10936 positive constant of type float that overflows at translation
10937 time", footnote "In this case, using INFINITY will violate the
10938 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10939 Thus we pedwarn to ensure this constraint violation is
10940 diagnosed. */
10941 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
10942 pedwarn (loc, 0, "target format does not support infinity");
10943
10944 real_inf (&real);
10945 return build_real (type, real);
10946 }
10947
10948 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
10949 NULL_TREE if no simplification can be made. */
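/* For illustration (not from the original source): sincos (x, &s, &c) is
   canonicalized below to roughly
     t = cexpi (x); *s = __imag t; *c = __real t;
   built as a COMPOUND_EXPR of two MODIFY_EXPRs, with T a complex value of
   the argument's type.  */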
10950
10951 static tree
10952 fold_builtin_sincos (location_t loc,
10953 tree arg0, tree arg1, tree arg2)
10954 {
10955 tree type;
10956 tree fndecl, call = NULL_TREE;
10957
10958 if (!validate_arg (arg0, REAL_TYPE)
10959 || !validate_arg (arg1, POINTER_TYPE)
10960 || !validate_arg (arg2, POINTER_TYPE))
10961 return NULL_TREE;
10962
10963 type = TREE_TYPE (arg0);
10964
10965 /* Calculate the result when the argument is a constant. */
10966 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
10967 if (fn == END_BUILTINS)
10968 return NULL_TREE;
10969
10970 /* Canonicalize sincos to cexpi. */
10971 if (TREE_CODE (arg0) == REAL_CST)
10972 {
10973 tree complex_type = build_complex_type (type);
10974 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
10975 }
10976 if (!call)
10977 {
10978 if (!targetm.libc_has_function (function_c99_math_complex, type)
10979 || !builtin_decl_implicit_p (fn))
10980 return NULL_TREE;
10981 fndecl = builtin_decl_explicit (fn);
10982 call = build_call_expr_loc (loc, fndecl, 1, arg0);
10983 call = builtin_save_expr (call);
10984 }
10985
10986 tree ptype = build_pointer_type (type);
10987 arg1 = fold_convert (ptype, arg1);
10988 arg2 = fold_convert (ptype, arg2);
10989 return build2 (COMPOUND_EXPR, void_type_node,
10990 build2 (MODIFY_EXPR, void_type_node,
10991 build_fold_indirect_ref_loc (loc, arg1),
10992 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
10993 build2 (MODIFY_EXPR, void_type_node,
10994 build_fold_indirect_ref_loc (loc, arg2),
10995 fold_build1_loc (loc, REALPART_EXPR, type, call)));
10996 }
10997
10998 /* Fold a call to builtin memcmp with arguments ARG1, ARG2 and LEN.
10999 Return NULL_TREE if no simplification can be made. */
11000
11001 static tree
11002 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11003 {
11004 if (!validate_arg (arg1, POINTER_TYPE)
11005 || !validate_arg (arg2, POINTER_TYPE)
11006 || !validate_arg (len, INTEGER_TYPE))
11007 return NULL_TREE;
11008
11009 /* If the LEN parameter is zero, return zero. */
11010 if (integer_zerop (len))
11011 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11012 arg1, arg2);
11013
11014 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11015 if (operand_equal_p (arg1, arg2, 0))
11016 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11017
11018   /* If the LEN parameter is one, return an expression corresponding to
11019      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
11020 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11021 {
11022 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11023 tree cst_uchar_ptr_node
11024 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11025
11026 tree ind1
11027 = fold_convert_loc (loc, integer_type_node,
11028 build1 (INDIRECT_REF, cst_uchar_node,
11029 fold_convert_loc (loc,
11030 cst_uchar_ptr_node,
11031 arg1)));
11032 tree ind2
11033 = fold_convert_loc (loc, integer_type_node,
11034 build1 (INDIRECT_REF, cst_uchar_node,
11035 fold_convert_loc (loc,
11036 cst_uchar_ptr_node,
11037 arg2)));
11038 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11039 }
11040
11041 return NULL_TREE;
11042 }
11043
11044 /* Fold a call to builtin isascii with argument ARG. */
11045
11046 static tree
11047 fold_builtin_isascii (location_t loc, tree arg)
11048 {
11049 if (!validate_arg (arg, INTEGER_TYPE))
11050 return NULL_TREE;
11051 else
11052 {
11053 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
11054 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11055 build_int_cst (integer_type_node,
11056 ~ (unsigned HOST_WIDE_INT) 0x7f));
11057 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11058 arg, integer_zero_node);
11059 }
11060 }
11061
11062 /* Fold a call to builtin toascii with argument ARG. */
11063
11064 static tree
11065 fold_builtin_toascii (location_t loc, tree arg)
11066 {
11067 if (!validate_arg (arg, INTEGER_TYPE))
11068 return NULL_TREE;
11069
11070 /* Transform toascii(c) -> (c & 0x7f). */
11071 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11072 build_int_cst (integer_type_node, 0x7f));
11073 }
11074
11075 /* Fold a call to builtin isdigit with argument ARG. */
11076
11077 static tree
11078 fold_builtin_isdigit (location_t loc, tree arg)
11079 {
11080 if (!validate_arg (arg, INTEGER_TYPE))
11081 return NULL_TREE;
11082 else
11083 {
11084 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
11085 /* According to the C standard, isdigit is unaffected by locale.
11086 However, it definitely is affected by the target character set. */
11087 unsigned HOST_WIDE_INT target_digit0
11088 = lang_hooks.to_target_charset ('0');
11089
11090 if (target_digit0 == 0)
11091 return NULL_TREE;
11092
11093 arg = fold_convert_loc (loc, unsigned_type_node, arg);
11094 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11095 build_int_cst (unsigned_type_node, target_digit0));
11096 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11097 build_int_cst (unsigned_type_node, 9));
11098 }
11099 }
11100
11101 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
11102
11103 static tree
11104 fold_builtin_fabs (location_t loc, tree arg, tree type)
11105 {
11106 if (!validate_arg (arg, REAL_TYPE))
11107 return NULL_TREE;
11108
11109 arg = fold_convert_loc (loc, type, arg);
11110 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11111 }
11112
11113 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
11114
11115 static tree
11116 fold_builtin_abs (location_t loc, tree arg, tree type)
11117 {
11118 if (!validate_arg (arg, INTEGER_TYPE))
11119 return NULL_TREE;
11120
11121 arg = fold_convert_loc (loc, type, arg);
11122 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11123 }
11124
11125 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11126
11127 static tree
11128 fold_builtin_carg (location_t loc, tree arg, tree type)
11129 {
11130 if (validate_arg (arg, COMPLEX_TYPE)
11131 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11132 {
11133 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11134
11135 if (atan2_fn)
11136 {
11137 tree new_arg = builtin_save_expr (arg);
11138 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11139 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11140 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11141 }
11142 }
11143
11144 return NULL_TREE;
11145 }
11146
11147 /* Fold a call to builtin frexp; we can assume the base is 2.  */
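/* Worked example (illustrative, not from the original source): for a
   constant argument, frexp (12.0, &e) folds to (*e = 4, 0.75), since
   12.0 = 0.75 * 2**4 and GCC keeps normalized significands in [0.5, 1.0).  */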
11148
11149 static tree
11150 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11151 {
11152 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11153 return NULL_TREE;
11154
11155 STRIP_NOPS (arg0);
11156
11157 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11158 return NULL_TREE;
11159
11160 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11161
11162 /* Proceed if a valid pointer type was passed in. */
11163 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11164 {
11165 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11166 tree frac, exp;
11167
11168 switch (value->cl)
11169 {
11170 case rvc_zero:
11171 /* For +-0, return (*exp = 0, +-0). */
11172 exp = integer_zero_node;
11173 frac = arg0;
11174 break;
11175 case rvc_nan:
11176 case rvc_inf:
11177 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
11178 return omit_one_operand_loc (loc, rettype, arg0, arg1);
11179 case rvc_normal:
11180 {
11181 /* Since the frexp function always expects base 2, and in
11182 GCC normalized significands are already in the range
11183 [0.5, 1.0), we have exactly what frexp wants. */
11184 REAL_VALUE_TYPE frac_rvt = *value;
11185 SET_REAL_EXP (&frac_rvt, 0);
11186 frac = build_real (rettype, frac_rvt);
11187 exp = build_int_cst (integer_type_node, REAL_EXP (value));
11188 }
11189 break;
11190 default:
11191 gcc_unreachable ();
11192 }
11193
11194       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
11195 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11196 TREE_SIDE_EFFECTS (arg1) = 1;
11197 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11198 }
11199
11200 return NULL_TREE;
11201 }
11202
11203 /* Fold a call to builtin modf. */
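/* Worked example (illustrative, not from the original source): for a
   constant argument, modf (2.5, &i) folds to (*i = 2.0, 0.5), and
   modf (-2.0, &i) folds to (*i = -2.0, -0.0), because an already-integral
   negative value gets a fractional part of -0.0.  */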
11204
11205 static tree
11206 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11207 {
11208 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11209 return NULL_TREE;
11210
11211 STRIP_NOPS (arg0);
11212
11213 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11214 return NULL_TREE;
11215
11216 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11217
11218 /* Proceed if a valid pointer type was passed in. */
11219 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11220 {
11221 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11222 REAL_VALUE_TYPE trunc, frac;
11223
11224 switch (value->cl)
11225 {
11226 case rvc_nan:
11227 case rvc_zero:
11228 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
11229 trunc = frac = *value;
11230 break;
11231 case rvc_inf:
11232 /* For +-Inf, return (*arg1 = arg0, +-0). */
11233 frac = dconst0;
11234 frac.sign = value->sign;
11235 trunc = *value;
11236 break;
11237 case rvc_normal:
11238 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
11239 real_trunc (&trunc, VOIDmode, value);
11240 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11241 /* If the original number was negative and already
11242 integral, then the fractional part is -0.0. */
11243 if (value->sign && frac.cl == rvc_zero)
11244 frac.sign = value->sign;
11245 break;
11246 }
11247
11248 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11249 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11250 build_real (rettype, trunc));
11251 TREE_SIDE_EFFECTS (arg1) = 1;
11252 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11253 build_real (rettype, frac));
11254 }
11255
11256 return NULL_TREE;
11257 }
11258
11259 /* Given a location LOC, an interclass builtin function decl FNDECL
11260    and its single argument ARG, return a folded expression computing
11261    the same, or NULL_TREE if we either couldn't or didn't want to fold
11262    (the latter happens if there's an RTL instruction available).  */
11263
11264 static tree
11265 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11266 {
11267 machine_mode mode;
11268
11269 if (!validate_arg (arg, REAL_TYPE))
11270 return NULL_TREE;
11271
11272 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11273 return NULL_TREE;
11274
11275 mode = TYPE_MODE (TREE_TYPE (arg));
11276
11277 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11278
11279 /* If there is no optab, try generic code. */
11280 switch (DECL_FUNCTION_CODE (fndecl))
11281 {
11282 tree result;
11283
11284 CASE_FLT_FN (BUILT_IN_ISINF):
11285 {
11286 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
11287 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11288 tree type = TREE_TYPE (arg);
11289 REAL_VALUE_TYPE r;
11290 char buf[128];
11291
11292 if (is_ibm_extended)
11293 {
11294 /* NaN and Inf are encoded in the high-order double value
11295 only. The low-order value is not significant. */
11296 type = double_type_node;
11297 mode = DFmode;
11298 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11299 }
11300 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11301 real_from_string (&r, buf);
11302 result = build_call_expr (isgr_fn, 2,
11303 fold_build1_loc (loc, ABS_EXPR, type, arg),
11304 build_real (type, r));
11305 return result;
11306 }
11307 CASE_FLT_FN (BUILT_IN_FINITE):
11308 case BUILT_IN_ISFINITE:
11309 {
11310 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
11311 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11312 tree type = TREE_TYPE (arg);
11313 REAL_VALUE_TYPE r;
11314 char buf[128];
11315
11316 if (is_ibm_extended)
11317 {
11318 /* NaN and Inf are encoded in the high-order double value
11319 only. The low-order value is not significant. */
11320 type = double_type_node;
11321 mode = DFmode;
11322 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11323 }
11324 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11325 real_from_string (&r, buf);
11326 result = build_call_expr (isle_fn, 2,
11327 fold_build1_loc (loc, ABS_EXPR, type, arg),
11328 build_real (type, r));
11329 /*result = fold_build2_loc (loc, UNGT_EXPR,
11330 TREE_TYPE (TREE_TYPE (fndecl)),
11331 fold_build1_loc (loc, ABS_EXPR, type, arg),
11332 build_real (type, r));
11333 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11334 TREE_TYPE (TREE_TYPE (fndecl)),
11335 result);*/
11336 return result;
11337 }
11338 case BUILT_IN_ISNORMAL:
11339 {
11340 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11341 islessequal(fabs(x),DBL_MAX). */
11342 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11343 tree type = TREE_TYPE (arg);
11344 tree orig_arg, max_exp, min_exp;
11345 machine_mode orig_mode = mode;
11346 REAL_VALUE_TYPE rmax, rmin;
11347 char buf[128];
11348
11349 orig_arg = arg = builtin_save_expr (arg);
11350 if (is_ibm_extended)
11351 {
11352 /* Use double to test the normal range of IBM extended
11353 precision. Emin for IBM extended precision is
11354 different to emin for IEEE double, being 53 higher
11355 since the low double exponent is at least 53 lower
11356 than the high double exponent. */
11357 type = double_type_node;
11358 mode = DFmode;
11359 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11360 }
11361 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11362
11363 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11364 real_from_string (&rmax, buf);
11365 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11366 real_from_string (&rmin, buf);
11367 max_exp = build_real (type, rmax);
11368 min_exp = build_real (type, rmin);
11369
11370 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11371 if (is_ibm_extended)
11372 {
11373 /* Testing the high end of the range is done just using
11374 the high double, using the same test as isfinite().
11375 For the subnormal end of the range we first test the
11376 high double, then if its magnitude is equal to the
11377 limit of 0x1p-969, we test whether the low double is
11378 non-zero and opposite sign to the high double. */
11379 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11380 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11381 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11382 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11383 arg, min_exp);
11384 tree as_complex = build1 (VIEW_CONVERT_EXPR,
11385 complex_double_type_node, orig_arg);
11386 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11387 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11388 tree zero = build_real (type, dconst0);
11389 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11390 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11391 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11392 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11393 fold_build3 (COND_EXPR,
11394 integer_type_node,
11395 hilt, logt, lolt));
11396 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11397 eq_min, ok_lo);
11398 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11399 gt_min, eq_min);
11400 }
11401 else
11402 {
11403 tree const isge_fn
11404 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11405 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11406 }
11407 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11408 max_exp, min_exp);
11409 return result;
11410 }
11411 default:
11412 break;
11413 }
11414
11415 return NULL_TREE;
11416 }
11417
11418 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11419 ARG is the argument for the call. */
11420
11421 static tree
11422 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11423 {
11424 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11425
11426 if (!validate_arg (arg, REAL_TYPE))
11427 return NULL_TREE;
11428
11429 switch (builtin_index)
11430 {
11431 case BUILT_IN_ISINF:
11432 if (tree_expr_infinite_p (arg))
11433 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11434 if (!tree_expr_maybe_infinite_p (arg))
11435 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11436 return NULL_TREE;
11437
11438 case BUILT_IN_ISINF_SIGN:
11439 {
11440 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11441 /* In a boolean context, GCC will fold the inner COND_EXPR to
11442 1. So e.g. "if (isinf_sign(x))" would be folded to just
11443 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11444 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11445 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11446 tree tmp = NULL_TREE;
11447
11448 arg = builtin_save_expr (arg);
11449
11450 if (signbit_fn && isinf_fn)
11451 {
11452 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11453 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11454
11455 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11456 signbit_call, integer_zero_node);
11457 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11458 isinf_call, integer_zero_node);
11459
11460 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11461 integer_minus_one_node, integer_one_node);
11462 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11463 isinf_call, tmp,
11464 integer_zero_node);
11465 }
11466
11467 return tmp;
11468 }
11469
11470 case BUILT_IN_ISFINITE:
11471 if (tree_expr_finite_p (arg))
11472 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11473 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11474 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11475 return NULL_TREE;
11476
11477 case BUILT_IN_ISNAN:
11478 if (tree_expr_nan_p (arg))
11479 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11480 if (!tree_expr_maybe_nan_p (arg))
11481 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11482
11483 {
11484 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11485 if (is_ibm_extended)
11486 {
11487 /* NaN and Inf are encoded in the high-order double value
11488 only. The low-order value is not significant. */
11489 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11490 }
11491 }
11492 arg = builtin_save_expr (arg);
11493 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11494
11495 default:
11496 gcc_unreachable ();
11497 }
11498 }
11499
11500 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11501 This builtin will generate code to return the appropriate floating
11502 point classification depending on the value of the floating point
11503 number passed in. The possible return values must be supplied as
11504 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11505 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
11506    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
11507    one floating point argument which is "type generic".  */
11508 static tree
11509 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11510 {
11511 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11512 arg, type, res, tmp;
11513 machine_mode mode;
11514 REAL_VALUE_TYPE r;
11515 char buf[128];
11516
11517 /* Verify the required arguments in the original call. */
11518 if (nargs != 6
11519 || !validate_arg (args[0], INTEGER_TYPE)
11520 || !validate_arg (args[1], INTEGER_TYPE)
11521 || !validate_arg (args[2], INTEGER_TYPE)
11522 || !validate_arg (args[3], INTEGER_TYPE)
11523 || !validate_arg (args[4], INTEGER_TYPE)
11524 || !validate_arg (args[5], REAL_TYPE))
11525 return NULL_TREE;
11526
11527 fp_nan = args[0];
11528 fp_infinite = args[1];
11529 fp_normal = args[2];
11530 fp_subnormal = args[3];
11531 fp_zero = args[4];
11532 arg = args[5];
11533 type = TREE_TYPE (arg);
11534 mode = TYPE_MODE (type);
11535 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11536
11537 /* fpclassify(x) ->
11538 isnan(x) ? FP_NAN :
11539 (fabs(x) == Inf ? FP_INFINITE :
11540 (fabs(x) >= DBL_MIN ? FP_NORMAL :
11541 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
11542
11543 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11544 build_real (type, dconst0));
11545 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11546 tmp, fp_zero, fp_subnormal);
11547
11548 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11549 real_from_string (&r, buf);
11550 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11551 arg, build_real (type, r));
11552 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11553
11554 if (tree_expr_maybe_infinite_p (arg))
11555 {
11556 real_inf (&r);
11557 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11558 build_real (type, r));
11559 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11560 fp_infinite, res);
11561 }
11562
11563 if (tree_expr_maybe_nan_p (arg))
11564 {
11565 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11566 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11567 }
11568
11569 return res;
11570 }
11571
11572 /* Fold a call to an unordered comparison function such as
11573 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
11574 being called and ARG0 and ARG1 are the arguments for the call.
11575 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11576 the opposite of the desired result. UNORDERED_CODE is used
11577 for modes that can hold NaNs and ORDERED_CODE is used for
11578 the rest. */
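/* For example (illustrative, not from the original source): for
   __builtin_isgreater the caller passes UNLE_EXPR/LE_EXPR, so the result is
   built as !(x unle y) when a NaN operand is possible and as !(x <= y)
   otherwise.  */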
11579
11580 static tree
11581 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11582 enum tree_code unordered_code,
11583 enum tree_code ordered_code)
11584 {
11585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11586 enum tree_code code;
11587 tree type0, type1;
11588 enum tree_code code0, code1;
11589 tree cmp_type = NULL_TREE;
11590
11591 type0 = TREE_TYPE (arg0);
11592 type1 = TREE_TYPE (arg1);
11593
11594 code0 = TREE_CODE (type0);
11595 code1 = TREE_CODE (type1);
11596
11597 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11598 /* Choose the wider of two real types. */
11599 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11600 ? type0 : type1;
11601 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11602 cmp_type = type0;
11603 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11604 cmp_type = type1;
11605
11606 arg0 = fold_convert_loc (loc, cmp_type, arg0);
11607 arg1 = fold_convert_loc (loc, cmp_type, arg1);
11608
11609 if (unordered_code == UNORDERED_EXPR)
11610 {
11611 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11612 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11613 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11614 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11615 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11616 }
11617
11618 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11619 ? unordered_code : ordered_code;
11620 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11621 fold_build2_loc (loc, code, type, arg0, arg1));
11622 }
11623
11624 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11625    arithmetic if it can never overflow, or into internal functions that
11626    return both the result of the arithmetic and an overflowed boolean flag
11627    in a complex integer result, or some other check for overflow.
11628 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11629 checking part of that. */
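/* Sketch of the non-constant case (illustrative, not from the original
   source): __builtin_add_overflow (a, b, &r) becomes roughly
     c = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <c>, (_Bool) IMAGPART_EXPR <c>;
   with C a complex integer temporary, while __builtin_add_overflow_p with
   constant operands folds directly to a boolean constant.  */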
11630
11631 static tree
11632 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11633 tree arg0, tree arg1, tree arg2)
11634 {
11635 enum internal_fn ifn = IFN_LAST;
11636 /* The code of the expression corresponding to the built-in. */
11637 enum tree_code opcode = ERROR_MARK;
11638 bool ovf_only = false;
11639
11640 switch (fcode)
11641 {
11642 case BUILT_IN_ADD_OVERFLOW_P:
11643 ovf_only = true;
11644 /* FALLTHRU */
11645 case BUILT_IN_ADD_OVERFLOW:
11646 case BUILT_IN_SADD_OVERFLOW:
11647 case BUILT_IN_SADDL_OVERFLOW:
11648 case BUILT_IN_SADDLL_OVERFLOW:
11649 case BUILT_IN_UADD_OVERFLOW:
11650 case BUILT_IN_UADDL_OVERFLOW:
11651 case BUILT_IN_UADDLL_OVERFLOW:
11652 opcode = PLUS_EXPR;
11653 ifn = IFN_ADD_OVERFLOW;
11654 break;
11655 case BUILT_IN_SUB_OVERFLOW_P:
11656 ovf_only = true;
11657 /* FALLTHRU */
11658 case BUILT_IN_SUB_OVERFLOW:
11659 case BUILT_IN_SSUB_OVERFLOW:
11660 case BUILT_IN_SSUBL_OVERFLOW:
11661 case BUILT_IN_SSUBLL_OVERFLOW:
11662 case BUILT_IN_USUB_OVERFLOW:
11663 case BUILT_IN_USUBL_OVERFLOW:
11664 case BUILT_IN_USUBLL_OVERFLOW:
11665 opcode = MINUS_EXPR;
11666 ifn = IFN_SUB_OVERFLOW;
11667 break;
11668 case BUILT_IN_MUL_OVERFLOW_P:
11669 ovf_only = true;
11670 /* FALLTHRU */
11671 case BUILT_IN_MUL_OVERFLOW:
11672 case BUILT_IN_SMUL_OVERFLOW:
11673 case BUILT_IN_SMULL_OVERFLOW:
11674 case BUILT_IN_SMULLL_OVERFLOW:
11675 case BUILT_IN_UMUL_OVERFLOW:
11676 case BUILT_IN_UMULL_OVERFLOW:
11677 case BUILT_IN_UMULLL_OVERFLOW:
11678 opcode = MULT_EXPR;
11679 ifn = IFN_MUL_OVERFLOW;
11680 break;
11681 default:
11682 gcc_unreachable ();
11683 }
11684
11685 /* For the "generic" overloads, the first two arguments can have different
11686 types and the last argument determines the target type to use to check
11687 for overflow. The arguments of the other overloads all have the same
11688 type. */
11689 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
11690
11691 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
11692 arguments are constant, attempt to fold the built-in call into a constant
11693 expression indicating whether or not it detected an overflow. */
11694 if (ovf_only
11695 && TREE_CODE (arg0) == INTEGER_CST
11696 && TREE_CODE (arg1) == INTEGER_CST)
11697 /* Perform the computation in the target type and check for overflow. */
11698 return omit_one_operand_loc (loc, boolean_type_node,
11699 arith_overflowed_p (opcode, type, arg0, arg1)
11700 ? boolean_true_node : boolean_false_node,
11701 arg2);
11702
11703 tree intres, ovfres;
11704 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
11705 {
11706 intres = fold_binary_loc (loc, opcode, type,
11707 fold_convert_loc (loc, type, arg0),
11708 fold_convert_loc (loc, type, arg1));
11709 if (TREE_OVERFLOW (intres))
11710 intres = drop_tree_overflow (intres);
11711 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
11712 ? boolean_true_node : boolean_false_node);
11713 }
11714 else
11715 {
11716 tree ctype = build_complex_type (type);
11717 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
11718 arg0, arg1);
11719 tree tgt = save_expr (call);
11720 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
11721 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
11722 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
11723 }
11724
11725 if (ovf_only)
11726 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
11727
11728 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
11729 tree store
11730 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
11731 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
11732 }
11733
11734 /* Fold a call to __builtin_FILE to a constant string. */
11735
11736 static inline tree
11737 fold_builtin_FILE (location_t loc)
11738 {
11739 if (const char *fname = LOCATION_FILE (loc))
11740 {
11741 /* The documentation says this builtin is equivalent to the preprocessor
11742 __FILE__ macro so it appears appropriate to use the same file prefix
11743 mappings. */
11744 fname = remap_macro_filename (fname);
11745 return build_string_literal (strlen (fname) + 1, fname);
11746 }
11747
11748 return build_string_literal (1, "");
11749 }
11750
11751 /* Fold a call to __builtin_FUNCTION to a constant string. */
11752
11753 static inline tree
11754 fold_builtin_FUNCTION ()
11755 {
11756 const char *name = "";
11757
11758 if (current_function_decl)
11759 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11760
11761 return build_string_literal (strlen (name) + 1, name);
11762 }
11763
11764 /* Fold a call to __builtin_LINE to an integer constant. */
11765
11766 static inline tree
11767 fold_builtin_LINE (location_t loc, tree type)
11768 {
11769 return build_int_cst (type, LOCATION_LINE (loc));
11770 }
11771
11772 /* Fold a call to built-in function FNDECL with 0 arguments.
11773 This function returns NULL_TREE if no simplification was possible. */
11774
11775 static tree
11776 fold_builtin_0 (location_t loc, tree fndecl)
11777 {
11778 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11779 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11780 switch (fcode)
11781 {
11782 case BUILT_IN_FILE:
11783 return fold_builtin_FILE (loc);
11784
11785 case BUILT_IN_FUNCTION:
11786 return fold_builtin_FUNCTION ();
11787
11788 case BUILT_IN_LINE:
11789 return fold_builtin_LINE (loc, type);
11790
11791 CASE_FLT_FN (BUILT_IN_INF):
11792 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11793 case BUILT_IN_INFD32:
11794 case BUILT_IN_INFD64:
11795 case BUILT_IN_INFD128:
11796 return fold_builtin_inf (loc, type, true);
11797
11798 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11799 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11800 return fold_builtin_inf (loc, type, false);
11801
11802 case BUILT_IN_CLASSIFY_TYPE:
11803 return fold_builtin_classify_type (NULL_TREE);
11804
11805 default:
11806 break;
11807 }
11808 return NULL_TREE;
11809 }
11810
11811 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11812 This function returns NULL_TREE if no simplification was possible. */
11813
11814 static tree
11815 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11816 {
11817 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11818 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11819
11820 if (TREE_CODE (arg0) == ERROR_MARK)
11821 return NULL_TREE;
11822
11823 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11824 return ret;
11825
11826 switch (fcode)
11827 {
11828 case BUILT_IN_CONSTANT_P:
11829 {
11830 tree val = fold_builtin_constant_p (arg0);
11831
11832 /* Gimplification will pull the CALL_EXPR for the builtin out of
11833 an if condition. When not optimizing, we'll not CSE it back.
11834 	 To avoid regressions such as link errors, return zero now.  */
11835 if (!val && !optimize)
11836 val = integer_zero_node;
11837
11838 return val;
11839 }
11840
11841 case BUILT_IN_CLASSIFY_TYPE:
11842 return fold_builtin_classify_type (arg0);
11843
11844 case BUILT_IN_STRLEN:
11845 return fold_builtin_strlen (loc, expr, type, arg0);
11846
11847 CASE_FLT_FN (BUILT_IN_FABS):
11848 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11849 case BUILT_IN_FABSD32:
11850 case BUILT_IN_FABSD64:
11851 case BUILT_IN_FABSD128:
11852 return fold_builtin_fabs (loc, arg0, type);
11853
11854 case BUILT_IN_ABS:
11855 case BUILT_IN_LABS:
11856 case BUILT_IN_LLABS:
11857 case BUILT_IN_IMAXABS:
11858 return fold_builtin_abs (loc, arg0, type);
11859
11860 CASE_FLT_FN (BUILT_IN_CONJ):
11861 if (validate_arg (arg0, COMPLEX_TYPE)
11862 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11863 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11864 break;
11865
11866 CASE_FLT_FN (BUILT_IN_CREAL):
11867 if (validate_arg (arg0, COMPLEX_TYPE)
11868 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11869 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11870 break;
11871
11872 CASE_FLT_FN (BUILT_IN_CIMAG):
11873 if (validate_arg (arg0, COMPLEX_TYPE)
11874 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11875 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11876 break;
11877
11878 CASE_FLT_FN (BUILT_IN_CARG):
11879 return fold_builtin_carg (loc, arg0, type);
11880
11881 case BUILT_IN_ISASCII:
11882 return fold_builtin_isascii (loc, arg0);
11883
11884 case BUILT_IN_TOASCII:
11885 return fold_builtin_toascii (loc, arg0);
11886
11887 case BUILT_IN_ISDIGIT:
11888 return fold_builtin_isdigit (loc, arg0);
11889
11890 CASE_FLT_FN (BUILT_IN_FINITE):
11891 case BUILT_IN_FINITED32:
11892 case BUILT_IN_FINITED64:
11893 case BUILT_IN_FINITED128:
11894 case BUILT_IN_ISFINITE:
11895 {
11896 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11897 if (ret)
11898 return ret;
11899 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11900 }
11901
11902 CASE_FLT_FN (BUILT_IN_ISINF):
11903 case BUILT_IN_ISINFD32:
11904 case BUILT_IN_ISINFD64:
11905 case BUILT_IN_ISINFD128:
11906 {
11907 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11908 if (ret)
11909 return ret;
11910 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11911 }
11912
11913 case BUILT_IN_ISNORMAL:
11914 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11915
11916 case BUILT_IN_ISINF_SIGN:
11917 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
11918
11919 CASE_FLT_FN (BUILT_IN_ISNAN):
11920 case BUILT_IN_ISNAND32:
11921 case BUILT_IN_ISNAND64:
11922 case BUILT_IN_ISNAND128:
11923 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
11924
11925 case BUILT_IN_FREE:
11926 if (integer_zerop (arg0))
11927 return build_empty_stmt (loc);
11928 break;
11929
11930 default:
11931 break;
11932 }
11933
11934 return NULL_TREE;
11935
11936 }
11937
11938 /* Folds a call EXPR (which may be null) to built-in function FNDECL
11939 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
11940 if no simplification was possible. */
11941
11942 static tree
11943 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
11944 {
11945 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11947
11948 if (TREE_CODE (arg0) == ERROR_MARK
11949 || TREE_CODE (arg1) == ERROR_MARK)
11950 return NULL_TREE;
11951
11952 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
11953 return ret;
11954
11955 switch (fcode)
11956 {
11957 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
11958 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
11959 if (validate_arg (arg0, REAL_TYPE)
11960 && validate_arg (arg1, POINTER_TYPE))
11961 return do_mpfr_lgamma_r (arg0, arg1, type);
11962 break;
11963
11964 CASE_FLT_FN (BUILT_IN_FREXP):
11965 return fold_builtin_frexp (loc, arg0, arg1, type);
11966
11967 CASE_FLT_FN (BUILT_IN_MODF):
11968 return fold_builtin_modf (loc, arg0, arg1, type);
11969
11970 case BUILT_IN_STRSPN:
11971 return fold_builtin_strspn (loc, expr, arg0, arg1);
11972
11973 case BUILT_IN_STRCSPN:
11974 return fold_builtin_strcspn (loc, expr, arg0, arg1);
11975
11976 case BUILT_IN_STRPBRK:
11977 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
11978
11979 case BUILT_IN_EXPECT:
11980 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
11981
11982 case BUILT_IN_ISGREATER:
11983 return fold_builtin_unordered_cmp (loc, fndecl,
11984 arg0, arg1, UNLE_EXPR, LE_EXPR);
11985 case BUILT_IN_ISGREATEREQUAL:
11986 return fold_builtin_unordered_cmp (loc, fndecl,
11987 arg0, arg1, UNLT_EXPR, LT_EXPR);
11988 case BUILT_IN_ISLESS:
11989 return fold_builtin_unordered_cmp (loc, fndecl,
11990 arg0, arg1, UNGE_EXPR, GE_EXPR);
11991 case BUILT_IN_ISLESSEQUAL:
11992 return fold_builtin_unordered_cmp (loc, fndecl,
11993 arg0, arg1, UNGT_EXPR, GT_EXPR);
11994 case BUILT_IN_ISLESSGREATER:
11995 return fold_builtin_unordered_cmp (loc, fndecl,
11996 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
11997 case BUILT_IN_ISUNORDERED:
11998 return fold_builtin_unordered_cmp (loc, fndecl,
11999 arg0, arg1, UNORDERED_EXPR,
12000 NOP_EXPR);
12001
12002 /* We do the folding for va_start in the expander. */
12003 case BUILT_IN_VA_START:
12004 break;
12005
12006 case BUILT_IN_OBJECT_SIZE:
12007 return fold_builtin_object_size (arg0, arg1);
12008
12009 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12010 return fold_builtin_atomic_always_lock_free (arg0, arg1);
12011
12012 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12013 return fold_builtin_atomic_is_lock_free (arg0, arg1);
12014
12015 default:
12016 break;
12017 }
12018 return NULL_TREE;
12019 }
12020
12021 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12022 and ARG2.
12023 This function returns NULL_TREE if no simplification was possible. */
12024
12025 static tree
12026 fold_builtin_3 (location_t loc, tree fndecl,
12027 tree arg0, tree arg1, tree arg2)
12028 {
12029 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12030 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12031
12032 if (TREE_CODE (arg0) == ERROR_MARK
12033 || TREE_CODE (arg1) == ERROR_MARK
12034 || TREE_CODE (arg2) == ERROR_MARK)
12035 return NULL_TREE;
12036
12037 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12038 arg0, arg1, arg2))
12039 return ret;
12040
12041 switch (fcode)
12042 {
12043
12044 CASE_FLT_FN (BUILT_IN_SINCOS):
12045 return fold_builtin_sincos (loc, arg0, arg1, arg2);
12046
12047 CASE_FLT_FN (BUILT_IN_REMQUO):
12048 if (validate_arg (arg0, REAL_TYPE)
12049 && validate_arg (arg1, REAL_TYPE)
12050 && validate_arg (arg2, POINTER_TYPE))
12051 return do_mpfr_remquo (arg0, arg1, arg2);
12052 break;
12053
12054 case BUILT_IN_MEMCMP:
12055 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12056
12057 case BUILT_IN_EXPECT:
12058 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12059
12060 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12061 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12062
12063 case BUILT_IN_ADD_OVERFLOW:
12064 case BUILT_IN_SUB_OVERFLOW:
12065 case BUILT_IN_MUL_OVERFLOW:
12066 case BUILT_IN_ADD_OVERFLOW_P:
12067 case BUILT_IN_SUB_OVERFLOW_P:
12068 case BUILT_IN_MUL_OVERFLOW_P:
12069 case BUILT_IN_SADD_OVERFLOW:
12070 case BUILT_IN_SADDL_OVERFLOW:
12071 case BUILT_IN_SADDLL_OVERFLOW:
12072 case BUILT_IN_SSUB_OVERFLOW:
12073 case BUILT_IN_SSUBL_OVERFLOW:
12074 case BUILT_IN_SSUBLL_OVERFLOW:
12075 case BUILT_IN_SMUL_OVERFLOW:
12076 case BUILT_IN_SMULL_OVERFLOW:
12077 case BUILT_IN_SMULLL_OVERFLOW:
12078 case BUILT_IN_UADD_OVERFLOW:
12079 case BUILT_IN_UADDL_OVERFLOW:
12080 case BUILT_IN_UADDLL_OVERFLOW:
12081 case BUILT_IN_USUB_OVERFLOW:
12082 case BUILT_IN_USUBL_OVERFLOW:
12083 case BUILT_IN_USUBLL_OVERFLOW:
12084 case BUILT_IN_UMUL_OVERFLOW:
12085 case BUILT_IN_UMULL_OVERFLOW:
12086 case BUILT_IN_UMULLL_OVERFLOW:
12087 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12088
12089 default:
12090 break;
12091 }
12092 return NULL_TREE;
12093 }
12094
12095 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12096 ARGS is an array of NARGS arguments. IGNORE is true if the result
12097 of the function call is ignored. This function returns NULL_TREE
12098 if no simplification was possible. */
12099
12100 static tree
12101 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12102 int nargs, bool)
12103 {
12104 tree ret = NULL_TREE;
12105
12106 switch (nargs)
12107 {
12108 case 0:
12109 ret = fold_builtin_0 (loc, fndecl);
12110 break;
12111 case 1:
12112 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12113 break;
12114 case 2:
12115 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12116 break;
12117 case 3:
12118 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12119 break;
12120 default:
12121 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12122 break;
12123 }
12124 if (ret)
12125 {
12126 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12127 SET_EXPR_LOCATION (ret, loc);
12128 return ret;
12129 }
12130 return NULL_TREE;
12131 }
12132
12133 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12134 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12135 of arguments in ARGS to be omitted. OLDNARGS is the number of
12136 elements in ARGS. */
12137
12138 static tree
12139 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12140 int skip, tree fndecl, int n, va_list newargs)
12141 {
12142 int nargs = oldnargs - skip + n;
12143 tree *buffer;
12144
12145 if (n > 0)
12146 {
12147 int i, j;
12148
12149 buffer = XALLOCAVEC (tree, nargs);
12150 for (i = 0; i < n; i++)
12151 buffer[i] = va_arg (newargs, tree);
12152 for (j = skip; j < oldnargs; j++, i++)
12153 buffer[i] = args[j];
12154 }
12155 else
12156 buffer = args + skip;
12157
12158 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12159 }
12160
12161 /* Return true if FNDECL shouldn't be folded right now.
12162 If a built-in function has an inline attribute always_inline
12163 wrapper, defer folding it until after always_inline functions have
12164 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
12165 might not be performed. */
12166
12167 bool
12168 avoid_folding_inline_builtin (tree fndecl)
12169 {
12170 return (DECL_DECLARED_INLINE_P (fndecl)
12171 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12172 && cfun
12173 && !cfun->always_inline_functions_inlined
12174 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12175 }
12176
12177 /* A wrapper function for builtin folding that prevents warnings for
12178 "statement without effect" and the like, caused by removing the
12179 call node earlier than the warning is generated. */
12180
12181 tree
12182 fold_call_expr (location_t loc, tree exp, bool ignore)
12183 {
12184 tree ret = NULL_TREE;
12185 tree fndecl = get_callee_fndecl (exp);
12186 if (fndecl && fndecl_built_in_p (fndecl)
12187 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12188 yet. Defer folding until we see all the arguments
12189 (after inlining). */
12190 && !CALL_EXPR_VA_ARG_PACK (exp))
12191 {
12192 int nargs = call_expr_nargs (exp);
12193
12194 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12195 instead last argument is __builtin_va_arg_pack (). Defer folding
12196 even in that case, until arguments are finalized. */
12197 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12198 {
12199 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12200 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12201 return NULL_TREE;
12202 }
12203
12204 if (avoid_folding_inline_builtin (fndecl))
12205 return NULL_TREE;
12206
12207 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12208 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12209 CALL_EXPR_ARGP (exp), ignore);
12210 else
12211 {
12212 tree *args = CALL_EXPR_ARGP (exp);
12213 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12214 if (ret)
12215 return ret;
12216 }
12217 }
12218 return NULL_TREE;
12219 }
12220
12221 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12222 N arguments are passed in the array ARGARRAY. Return a folded
12223 expression or NULL_TREE if no simplification was possible. */
12224
12225 tree
12226 fold_builtin_call_array (location_t loc, tree,
12227 tree fn,
12228 int n,
12229 tree *argarray)
12230 {
12231 if (TREE_CODE (fn) != ADDR_EXPR)
12232 return NULL_TREE;
12233
12234 tree fndecl = TREE_OPERAND (fn, 0);
12235 if (TREE_CODE (fndecl) == FUNCTION_DECL
12236 && fndecl_built_in_p (fndecl))
12237 {
12238 /* If last argument is __builtin_va_arg_pack (), arguments to this
12239 function are not finalized yet. Defer folding until they are. */
12240 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12241 {
12242 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12243 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12244 return NULL_TREE;
12245 }
12246 if (avoid_folding_inline_builtin (fndecl))
12247 return NULL_TREE;
12248 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12249 return targetm.fold_builtin (fndecl, n, argarray, false);
12250 else
12251 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12252 }
12253
12254 return NULL_TREE;
12255 }
12256
12257 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12258 along with N new arguments specified as the "..." parameters. SKIP
12259 is the number of arguments in EXP to be omitted. This function is used
12260 to do varargs-to-varargs transformations. */
12261
12262 static tree
12263 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12264 {
12265 va_list ap;
12266 tree t;
12267
12268 va_start (ap, n);
12269 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12270 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12271 va_end (ap);
12272
12273 return t;
12274 }
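
/* As an illustration of the mechanics (a hypothetical transformation, not
   one performed here): rewrite_call_expr (loc, exp, 4, strcpy_decl, 2,
   dst, src) applied to a four-argument call EXP drops all four original
   arguments (SKIP == 4) and builds the new two-argument call
   strcpy_decl (dst, src).  */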
12275
12276 /* Validate a single argument ARG against a tree code CODE representing
12277 a type. Return true when argument is valid. */
12278
12279 static bool
12280 validate_arg (const_tree arg, enum tree_code code)
12281 {
12282 if (!arg)
12283 return false;
12284 else if (code == POINTER_TYPE)
12285 return POINTER_TYPE_P (TREE_TYPE (arg));
12286 else if (code == INTEGER_TYPE)
12287 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12288 return code == TREE_CODE (TREE_TYPE (arg));
12289 }
12290
12291 /* This function validates the types of a function call argument list
12292 against a specified list of tree_codes. If the last specifier is a 0,
12293 that represents an ellipsis; otherwise the last specifier must be a
12294 VOID_TYPE.
12295
12296 This is the GIMPLE version of validate_arglist. Eventually we want to
12297 completely convert builtins.c to work from GIMPLEs and the tree based
12298 validate_arglist will then be removed. */
12299
12300 bool
12301 validate_gimple_arglist (const gcall *call, ...)
12302 {
12303 enum tree_code code;
12304 bool res = 0;
12305 va_list ap;
12306 const_tree arg;
12307 size_t i;
12308
12309 va_start (ap, call);
12310 i = 0;
12311
12312 do
12313 {
12314 code = (enum tree_code) va_arg (ap, int);
12315 switch (code)
12316 {
12317 case 0:
12318 /* This signifies an ellipsis; any further arguments are all ok. */
12319 res = true;
12320 goto end;
12321 case VOID_TYPE:
12322 /* This signifies an endlink; if no arguments remain, return
12323 true, otherwise return false. */
12324 res = (i == gimple_call_num_args (call));
12325 goto end;
12326 default:
12327 /* If no parameters remain or the parameter's code does not
12328 match the specified code, return false. Otherwise continue
12329 checking any remaining arguments. */
12330 arg = gimple_call_arg (call, i++);
12331 if (!validate_arg (arg, code))
12332 goto end;
12333 break;
12334 }
12335 }
12336 while (1);
12337
12338 /* We need gotos here since we can only have one VA_CLOSE in a
12339 function. */
12340 end: ;
12341 va_end (ap);
12342
12343 return res;
12344 }
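
/* For example, validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts only a call with exactly one pointer argument
   followed by one integral argument, whereas ending the list with 0
   instead of VOID_TYPE would additionally accept any number of trailing
   arguments of any type.  */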
12345
12346 /* Default target-specific builtin expander that does nothing. */
12347
12348 rtx
12349 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12350 rtx target ATTRIBUTE_UNUSED,
12351 rtx subtarget ATTRIBUTE_UNUSED,
12352 machine_mode mode ATTRIBUTE_UNUSED,
12353 int ignore ATTRIBUTE_UNUSED)
12354 {
12355 return NULL_RTX;
12356 }
12357
12358 /* Returns true if EXP represents data that would potentially reside
12359 in a readonly section. */
12360
12361 bool
12362 readonly_data_expr (tree exp)
12363 {
12364 STRIP_NOPS (exp);
12365
12366 if (TREE_CODE (exp) != ADDR_EXPR)
12367 return false;
12368
12369 exp = get_base_address (TREE_OPERAND (exp, 0));
12370 if (!exp)
12371 return false;
12372
12373 /* Make sure we call decl_readonly_section only for trees it
12374 can handle (since it returns true for everything it doesn't
12375 understand). */
12376 if (TREE_CODE (exp) == STRING_CST
12377 || TREE_CODE (exp) == CONSTRUCTOR
12378 || (VAR_P (exp) && TREE_STATIC (exp)))
12379 return decl_readonly_section (exp, 0);
12380 else
12381 return false;
12382 }
12383
12384 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12385 to the call, and TYPE is its return type.
12386
12387 Return NULL_TREE if no simplification was possible, otherwise return the
12388 simplified form of the call as a tree.
12389
12390 The simplified form may be a constant or other expression which
12391 computes the same value, but in a more efficient manner (including
12392 calls to other builtin functions).
12393
12394 The call may contain arguments which need to be evaluated, but
12395 which are not useful to determine the result of the call. In
12396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12397 COMPOUND_EXPR will be an argument which must be evaluated.
12398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12399 COMPOUND_EXPR in the chain will contain the tree for the simplified
12400 form of the builtin function call. */
12401
12402 static tree
12403 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12404 {
12405 if (!validate_arg (s1, POINTER_TYPE)
12406 || !validate_arg (s2, POINTER_TYPE))
12407 return NULL_TREE;
12408
12409 tree fn;
12410 const char *p1, *p2;
12411
12412 p2 = c_getstr (s2);
12413 if (p2 == NULL)
12414 return NULL_TREE;
12415
12416 p1 = c_getstr (s1);
12417 if (p1 != NULL)
12418 {
12419 const char *r = strpbrk (p1, p2);
12420 tree tem;
12421
12422 if (r == NULL)
12423 return build_int_cst (TREE_TYPE (s1), 0);
12424
12425 /* Return an offset into the constant string argument. */
12426 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12427 return fold_convert_loc (loc, type, tem);
12428 }
12429
12430 if (p2[0] == '\0')
12431 /* strpbrk(x, "") == NULL.
12432 Evaluate and ignore s1 in case it had side-effects. */
12433 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12434
12435 if (p2[1] != '\0')
12436 return NULL_TREE; /* Really call strpbrk. */
12437
12438 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12439 if (!fn)
12440 return NULL_TREE;
12441
12442 /* New argument list transforming strpbrk(s1, s2) to
12443 strchr(s1, s2[0]). */
12444 return build_call_expr_loc (loc, fn, 2, s1,
12445 build_int_cst (integer_type_node, p2[0]));
12446 }
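
/* For example, with both arguments constant, strpbrk ("abcd", "dc")
   folds to a pointer two bytes past the start of the first string;
   strpbrk (s, "") folds to a null pointer while still evaluating S for
   side effects; and strpbrk (s, "x") becomes strchr (s, 'x') when
   strchr is available.  */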
12447
12448 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12449 to the call.
12450
12451 Return NULL_TREE if no simplification was possible, otherwise return the
12452 simplified form of the call as a tree.
12453
12454 The simplified form may be a constant or other expression which
12455 computes the same value, but in a more efficient manner (including
12456 calls to other builtin functions).
12457
12458 The call may contain arguments which need to be evaluated, but
12459 which are not useful to determine the result of the call. In
12460 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12461 COMPOUND_EXPR will be an argument which must be evaluated.
12462 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12463 COMPOUND_EXPR in the chain will contain the tree for the simplified
12464 form of the builtin function call. */
12465
12466 static tree
12467 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12468 {
12469 if (!validate_arg (s1, POINTER_TYPE)
12470 || !validate_arg (s2, POINTER_TYPE))
12471 return NULL_TREE;
12472
12473 if (!check_nul_terminated_array (expr, s1)
12474 || !check_nul_terminated_array (expr, s2))
12475 return NULL_TREE;
12476
12477 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12478
12479 /* If either argument is "", the result is zero. */
12480 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12481 /* Evaluate and ignore both arguments in case either one has
12482 side-effects. */
12483 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12484 s1, s2);
12485 return NULL_TREE;
12486 }
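
/* For example, strspn (s, "") and strspn ("", s) both fold to
   (size_t) 0, with both arguments still evaluated in case either has
   side effects.  */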
12487
12488 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12489 to the call.
12490
12491 Return NULL_TREE if no simplification was possible, otherwise return the
12492 simplified form of the call as a tree.
12493
12494 The simplified form may be a constant or other expression which
12495 computes the same value, but in a more efficient manner (including
12496 calls to other builtin functions).
12497
12498 The call may contain arguments which need to be evaluated, but
12499 which are not useful to determine the result of the call. In
12500 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12501 COMPOUND_EXPR will be an argument which must be evaluated.
12502 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12503 COMPOUND_EXPR in the chain will contain the tree for the simplified
12504 form of the builtin function call. */
12505
12506 static tree
12507 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12508 {
12509 if (!validate_arg (s1, POINTER_TYPE)
12510 || !validate_arg (s2, POINTER_TYPE))
12511 return NULL_TREE;
12512
12513 if (!check_nul_terminated_array (expr, s1)
12514 || !check_nul_terminated_array (expr, s2))
12515 return NULL_TREE;
12516
12517 /* If the first argument is "", the result is zero. */
12518 const char *p1 = c_getstr (s1);
12519 if (p1 && *p1 == '\0')
12520 {
12521 /* Evaluate and ignore argument s2 in case it has
12522 side-effects. */
12523 return omit_one_operand_loc (loc, size_type_node,
12524 size_zero_node, s2);
12525 }
12526
12527 /* If the second argument is "", return __builtin_strlen(s1). */
12528 const char *p2 = c_getstr (s2);
12529 if (p2 && *p2 == '\0')
12530 {
12531 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12532
12533 /* If the replacement _DECL isn't initialized, don't do the
12534 transformation. */
12535 if (!fn)
12536 return NULL_TREE;
12537
12538 return build_call_expr_loc (loc, fn, 1, s1);
12539 }
12540 return NULL_TREE;
12541 }
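
/* For example, strcspn ("", s) folds to (size_t) 0 while still
   evaluating S for side effects, and strcspn (s, "") folds to a call
   to __builtin_strlen (s).  */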
12542
12543 /* Fold the next_arg or va_start call EXP. Returns true if an error
12544 was produced, false otherwise. This is done so that we don't output
12545 the error or warning twice or three times. */
12546
12547 bool
12548 fold_builtin_next_arg (tree exp, bool va_start_p)
12549 {
12550 tree fntype = TREE_TYPE (current_function_decl);
12551 int nargs = call_expr_nargs (exp);
12552 tree arg;
12553 /* There is a good chance the current input_location points inside the
12554 definition of the va_start macro (perhaps on the token for
12555 builtin) in a system header, so warnings will not be emitted.
12556 Use the location in real source code. */
12557 location_t current_location =
12558 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12559 NULL);
12560
12561 if (!stdarg_p (fntype))
12562 {
12563 error ("%<va_start%> used in function with fixed arguments");
12564 return true;
12565 }
12566
12567 if (va_start_p)
12568 {
12569 if (va_start_p && (nargs != 2))
12570 {
12571 error ("wrong number of arguments to function %<va_start%>");
12572 return true;
12573 }
12574 arg = CALL_EXPR_ARG (exp, 1);
12575 }
12576 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12577 when we have checked the arguments and, if needed, issued a warning. */
12578 else
12579 {
12580 if (nargs == 0)
12581 {
12582 /* Evidently an out of date version of <stdarg.h>; can't validate
12583 va_start's second argument, but can still work as intended. */
12584 warning_at (current_location,
12585 OPT_Wvarargs,
12586 "%<__builtin_next_arg%> called without an argument");
12587 return true;
12588 }
12589 else if (nargs > 1)
12590 {
12591 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12592 return true;
12593 }
12594 arg = CALL_EXPR_ARG (exp, 0);
12595 }
12596
12597 if (TREE_CODE (arg) == SSA_NAME)
12598 arg = SSA_NAME_VAR (arg);
12599
12600 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12601 or __builtin_next_arg (0) the first time we see it, after checking
12602 the arguments and if needed issuing a warning. */
12603 if (!integer_zerop (arg))
12604 {
12605 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12606
12607 /* Strip off all nops for the sake of the comparison. This
12608 is not quite the same as STRIP_NOPS. It does more.
12609 We must also strip off INDIRECT_REF for C++ reference
12610 parameters. */
12611 while (CONVERT_EXPR_P (arg)
12612 || TREE_CODE (arg) == INDIRECT_REF)
12613 arg = TREE_OPERAND (arg, 0);
12614 if (arg != last_parm)
12615 {
12616 /* FIXME: Sometimes with the tree optimizers we can get something
12617 other than the last argument even though the user used the last
12618 argument. We just warn and set the arg to be the last
12619 argument so that we will get wrong-code because of
12620 it. */
12621 warning_at (current_location,
12622 OPT_Wvarargs,
12623 "second parameter of %<va_start%> not last named argument");
12624 }
12625
12626 /* Undefined by C99 7.15.1.4p4 (va_start):
12627 "If the parameter parmN is declared with the register storage
12628 class, with a function or array type, or with a type that is
12629 not compatible with the type that results after application of
12630 the default argument promotions, the behavior is undefined."
12631 */
12632 else if (DECL_REGISTER (arg))
12633 {
12634 warning_at (current_location,
12635 OPT_Wvarargs,
12636 "undefined behavior when second parameter of "
12637 "%<va_start%> is declared with %<register%> storage");
12638 }
12639
12640 /* We want to verify the second parameter just once before the tree
12641 optimizers are run and then avoid keeping it in the tree,
12642 as otherwise we could warn even for correct code like:
12643 void foo (int i, ...)
12644 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12645 if (va_start_p)
12646 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12647 else
12648 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12649 }
12650 return false;
12651 }
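
/* For example, in
     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); ... }
   the second argument of va_start is not the last named parameter, so
   the call is diagnosed with -Wvarargs; va_start (ap, b) is accepted
   and its second argument is then replaced by 0 so later passes do not
   warn again.  */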
12652
12653
12654 /* Expand a call EXP to __builtin_object_size. */
12655
12656 static rtx
12657 expand_builtin_object_size (tree exp)
12658 {
12659 tree ost;
12660 int object_size_type;
12661 tree fndecl = get_callee_fndecl (exp);
12662
12663 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12664 {
12665 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
12666 exp, fndecl);
12667 expand_builtin_trap ();
12668 return const0_rtx;
12669 }
12670
12671 ost = CALL_EXPR_ARG (exp, 1);
12672 STRIP_NOPS (ost);
12673
12674 if (TREE_CODE (ost) != INTEGER_CST
12675 || tree_int_cst_sgn (ost) < 0
12676 || compare_tree_int (ost, 3) > 0)
12677 {
12678 error ("%Klast argument of %qD is not integer constant between 0 and 3",
12679 exp, fndecl);
12680 expand_builtin_trap ();
12681 return const0_rtx;
12682 }
12683
12684 object_size_type = tree_to_shwi (ost);
12685
12686 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12687 }
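
/* For example, if the size of the object P points to is still unknown
   when the call reaches RTL expansion, __builtin_object_size (p, 0)
   and __builtin_object_size (p, 1) expand to (size_t) -1, while types
   2 and 3 expand to (size_t) 0.  */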
12688
12689 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12690 FCODE is the BUILT_IN_* to use.
12691 Return NULL_RTX if we failed; the caller should emit a normal call,
12692 otherwise try to get the result in TARGET, if convenient (and in
12693 mode MODE if that's convenient). */
12694
12695 static rtx
12696 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
12697 enum built_in_function fcode)
12698 {
12699 if (!validate_arglist (exp,
12700 POINTER_TYPE,
12701 fcode == BUILT_IN_MEMSET_CHK
12702 ? INTEGER_TYPE : POINTER_TYPE,
12703 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12704 return NULL_RTX;
12705
12706 tree dest = CALL_EXPR_ARG (exp, 0);
12707 tree src = CALL_EXPR_ARG (exp, 1);
12708 tree len = CALL_EXPR_ARG (exp, 2);
12709 tree size = CALL_EXPR_ARG (exp, 3);
12710
12711 /* FIXME: Set access mode to write only for memset et al. */
12712 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12713 /*srcstr=*/NULL_TREE, size, access_read_write);
12714
12715 if (!tree_fits_uhwi_p (size))
12716 return NULL_RTX;
12717
12718 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12719 {
12720 /* Avoid transforming the checking call to an ordinary one when
12721 an overflow has been detected or when the call couldn't be
12722 validated because the size is not constant. */
12723 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12724 return NULL_RTX;
12725
12726 tree fn = NULL_TREE;
12727 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12728 mem{cpy,pcpy,move,set} is available. */
12729 switch (fcode)
12730 {
12731 case BUILT_IN_MEMCPY_CHK:
12732 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12733 break;
12734 case BUILT_IN_MEMPCPY_CHK:
12735 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12736 break;
12737 case BUILT_IN_MEMMOVE_CHK:
12738 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12739 break;
12740 case BUILT_IN_MEMSET_CHK:
12741 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12742 break;
12743 default:
12744 break;
12745 }
12746
12747 if (! fn)
12748 return NULL_RTX;
12749
12750 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12751 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12752 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12753 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12754 }
12755 else if (fcode == BUILT_IN_MEMSET_CHK)
12756 return NULL_RTX;
12757 else
12758 {
12759 unsigned int dest_align = get_pointer_alignment (dest);
12760
12761 /* If DEST is not a pointer type, call the normal function. */
12762 if (dest_align == 0)
12763 return NULL_RTX;
12764
12765 /* If SRC and DEST are the same (and not volatile), do nothing. */
12766 if (operand_equal_p (src, dest, 0))
12767 {
12768 tree expr;
12769
12770 if (fcode != BUILT_IN_MEMPCPY_CHK)
12771 {
12772 /* Evaluate and ignore LEN in case it has side-effects. */
12773 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12774 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12775 }
12776
12777 expr = fold_build_pointer_plus (dest, len);
12778 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12779 }
12780
12781 /* __memmove_chk special case. */
12782 if (fcode == BUILT_IN_MEMMOVE_CHK)
12783 {
12784 unsigned int src_align = get_pointer_alignment (src);
12785
12786 if (src_align == 0)
12787 return NULL_RTX;
12788
12789 /* If src is categorized for a readonly section we can use
12790 normal __memcpy_chk. */
12791 if (readonly_data_expr (src))
12792 {
12793 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12794 if (!fn)
12795 return NULL_RTX;
12796 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12797 dest, src, len, size);
12798 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12799 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12800 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12801 }
12802 }
12803 return NULL_RTX;
12804 }
12805 }
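
/* For example, __builtin___memcpy_chk (d, s, 32, os) with a constant
   length known not to exceed the object size OS (or with OS equal to
   (size_t) -1, meaning the size could not be determined) is expanded
   as a plain memcpy (d, s, 32), since the runtime check could never
   fail.  */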
12806
12807 /* Emit warning if a buffer overflow is detected at compile time. */
12808
12809 static void
12810 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12811 {
12812 /* The source string. */
12813 tree srcstr = NULL_TREE;
12814 /* The size of the destination object returned by __builtin_object_size. */
12815 tree objsize = NULL_TREE;
12816 /* The string that is being concatenated with (as in __strcat_chk)
12817 or null if it isn't. */
12818 tree catstr = NULL_TREE;
12819 /* The maximum length of the source sequence in a bounded operation
12820 (such as __strncat_chk) or null if the operation isn't bounded
12821 (such as __strcat_chk). */
12822 tree maxread = NULL_TREE;
12823 /* The exact size of the access (such as in __strncpy_chk). */
12824 tree size = NULL_TREE;
12825 /* The access by the function that's checked. Except for snprintf
12826 both writing and reading is checked. */
12827 access_mode mode = access_read_write;
12828
12829 switch (fcode)
12830 {
12831 case BUILT_IN_STRCPY_CHK:
12832 case BUILT_IN_STPCPY_CHK:
12833 srcstr = CALL_EXPR_ARG (exp, 1);
12834 objsize = CALL_EXPR_ARG (exp, 2);
12835 break;
12836
12837 case BUILT_IN_STRCAT_CHK:
12838 /* For __strcat_chk the warning will be emitted only if overflowing
12839 by at least strlen (dest) + 1 bytes. */
12840 catstr = CALL_EXPR_ARG (exp, 0);
12841 srcstr = CALL_EXPR_ARG (exp, 1);
12842 objsize = CALL_EXPR_ARG (exp, 2);
12843 break;
12844
12845 case BUILT_IN_STRNCAT_CHK:
12846 catstr = CALL_EXPR_ARG (exp, 0);
12847 srcstr = CALL_EXPR_ARG (exp, 1);
12848 maxread = CALL_EXPR_ARG (exp, 2);
12849 objsize = CALL_EXPR_ARG (exp, 3);
12850 break;
12851
12852 case BUILT_IN_STRNCPY_CHK:
12853 case BUILT_IN_STPNCPY_CHK:
12854 srcstr = CALL_EXPR_ARG (exp, 1);
12855 size = CALL_EXPR_ARG (exp, 2);
12856 objsize = CALL_EXPR_ARG (exp, 3);
12857 break;
12858
12859 case BUILT_IN_SNPRINTF_CHK:
12860 case BUILT_IN_VSNPRINTF_CHK:
12861 maxread = CALL_EXPR_ARG (exp, 1);
12862 objsize = CALL_EXPR_ARG (exp, 3);
12863 /* The only checked access is the write to the destination. */
12864 mode = access_write_only;
12865 break;
12866 default:
12867 gcc_unreachable ();
12868 }
12869
12870 if (catstr && maxread)
12871 {
12872 /* Check __strncat_chk. There is no way to determine the length
12873 of the string to which the source string is being appended so
12874 just warn when the length of the source string is not known. */
12875 check_strncat_sizes (exp, objsize);
12876 return;
12877 }
12878
12879 check_access (exp, size, maxread, srcstr, objsize, mode);
12880 }
12881
12882 /* Emit warning if a buffer overflow is detected at compile time
12883 in __sprintf_chk/__vsprintf_chk calls. */
12884
12885 static void
12886 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12887 {
12888 tree size, len, fmt;
12889 const char *fmt_str;
12890 int nargs = call_expr_nargs (exp);
12891
12892 /* Verify the required arguments in the original call. */
12893
12894 if (nargs < 4)
12895 return;
12896 size = CALL_EXPR_ARG (exp, 2);
12897 fmt = CALL_EXPR_ARG (exp, 3);
12898
12899 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12900 return;
12901
12902 /* Check whether the format is a literal string constant. */
12903 fmt_str = c_getstr (fmt);
12904 if (fmt_str == NULL)
12905 return;
12906
12907 if (!init_target_chars ())
12908 return;
12909
12910 /* If the format doesn't contain % args or %%, we know its size. */
12911 if (strchr (fmt_str, target_percent) == 0)
12912 len = build_int_cstu (size_type_node, strlen (fmt_str));
12913 /* If the format is "%s" and the first ... argument is a string literal,
12914 we know it too. */
12915 else if (fcode == BUILT_IN_SPRINTF_CHK
12916 && strcmp (fmt_str, target_percent_s) == 0)
12917 {
12918 tree arg;
12919
12920 if (nargs < 5)
12921 return;
12922 arg = CALL_EXPR_ARG (exp, 4);
12923 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12924 return;
12925
12926 len = c_strlen (arg, 1);
12927 if (!len || ! tree_fits_uhwi_p (len))
12928 return;
12929 }
12930 else
12931 return;
12932
12933 /* Add one for the terminating nul. */
12934 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
12935
12936 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12937 access_write_only);
12938 }
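
/* For example, __builtin___sprintf_chk (buf, 0, 4, "abcde") is known
   at compile time to write 6 bytes (5 characters plus the terminating
   nul) into a destination whose size was determined to be 4, and is
   diagnosed here.  */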
12939
12940 /* Return true if FNDECL is an allocation function. Unless
12941 ALL_ALLOC is set, consider only functions that return dynamically
12942 allocated objects. Otherwise return true even for all forms of
12943 alloca (including VLA). */
12944
12945 static bool
12946 fndecl_alloc_p (tree fndecl, bool all_alloc)
12947 {
12948 if (!fndecl)
12949 return false;
12950
12951 /* A call to operator new isn't recognized as one to a built-in. */
12952 if (DECL_IS_OPERATOR_NEW_P (fndecl))
12953 return true;
12954
12955 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12956 {
12957 switch (DECL_FUNCTION_CODE (fndecl))
12958 {
12959 case BUILT_IN_ALLOCA:
12960 case BUILT_IN_ALLOCA_WITH_ALIGN:
12961 return all_alloc;
12962 case BUILT_IN_ALIGNED_ALLOC:
12963 case BUILT_IN_CALLOC:
12964 case BUILT_IN_GOMP_ALLOC:
12965 case BUILT_IN_MALLOC:
12966 case BUILT_IN_REALLOC:
12967 case BUILT_IN_STRDUP:
12968 case BUILT_IN_STRNDUP:
12969 return true;
12970 default:
12971 break;
12972 }
12973 }
12974
12975 /* A function is considered an allocation function if it's declared
12976 with attribute malloc with an argument naming its associated
12977 deallocation function. */
12978 tree attrs = DECL_ATTRIBUTES (fndecl);
12979 if (!attrs)
12980 return false;
12981
12982 for (tree allocs = attrs;
12983 (allocs = lookup_attribute ("malloc", allocs));
12984 allocs = TREE_CHAIN (allocs))
12985 {
12986 tree args = TREE_VALUE (allocs);
12987 if (!args)
12988 continue;
12989
12990 if (TREE_VALUE (args))
12991 return true;
12992 }
12993
12994 return false;
12995 }
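
/* For example (hypothetical declarations), given
     void my_release (void *);
     __attribute__ ((malloc (my_release))) void *my_acquire (size_t);
   my_acquire is treated as an allocation function because its "malloc"
   attribute names an associated deallocator, even though it is not a
   built-in.  */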
12996
12997 /* Return true if STMT is a call to an allocation function. A wrapper
12998 around fndecl_alloc_p. */
12999
13000 static bool
13001 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13002 {
13003 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13004 }
13005
13006 /* Return the zero-based number corresponding to the argument being
13007 deallocated if STMT is a call to a deallocation function or UINT_MAX
13008 if it isn't. */
13009
13010 static unsigned
13011 call_dealloc_argno (tree exp)
13012 {
13013 tree fndecl = get_callee_fndecl (exp);
13014 if (!fndecl)
13015 return UINT_MAX;
13016
13017 return fndecl_dealloc_argno (fndecl);
13018 }
13019
13020 /* Return the zero-based number corresponding to the argument being
13021 deallocated if FNDECL is a deallocation function or UINT_MAX
13022 if it isn't. */
13023
13024 unsigned
13025 fndecl_dealloc_argno (tree fndecl)
13026 {
13027 /* A call to operator delete isn't recognized as one to a built-in. */
13028 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13029 return 0;
13030
13031 /* TODO: Handle user-defined functions with attribute malloc? Handle
13032 known non-built-ins like fopen? */
13033 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13034 {
13035 switch (DECL_FUNCTION_CODE (fndecl))
13036 {
13037 case BUILT_IN_FREE:
13038 case BUILT_IN_REALLOC:
13039 return 0;
13040 default:
13041 break;
13042 }
13043 return UINT_MAX;
13044 }
13045
13046 tree attrs = DECL_ATTRIBUTES (fndecl);
13047 if (!attrs)
13048 return UINT_MAX;
13049
13050 for (tree atfree = attrs;
13051 (atfree = lookup_attribute ("*dealloc", atfree));
13052 atfree = TREE_CHAIN (atfree))
13053 {
13054 tree alloc = TREE_VALUE (atfree);
13055 if (!alloc)
13056 continue;
13057
13058 tree pos = TREE_CHAIN (alloc);
13059 if (!pos)
13060 return 0;
13061
13062 pos = TREE_VALUE (pos);
13063 return TREE_INT_CST_LOW (pos) - 1;
13064 }
13065
13066 return UINT_MAX;
13067 }
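
/* For example (hypothetical declarations), given
     void my_release (int flags, void *ptr);
     __attribute__ ((malloc (my_release, 2))) void *my_acquire (size_t);
   fndecl_dealloc_argno (my_release) returns 1, the zero-based index of
   the PTR argument, derived from the one-based position recorded by
   the attribute.  */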
13068
13069 /* Return true if DELC doesn't refer to an operator delete that's
13070 suitable to call with a pointer returned from the operator new
13071 described by NEWC. */
13072
13073 static bool
13074 new_delete_mismatch_p (const demangle_component &newc,
13075 const demangle_component &delc)
13076 {
13077 if (newc.type != delc.type)
13078 return true;
13079
13080 switch (newc.type)
13081 {
13082 case DEMANGLE_COMPONENT_NAME:
13083 {
13084 int len = newc.u.s_name.len;
13085 const char *news = newc.u.s_name.s;
13086 const char *dels = delc.u.s_name.s;
13087 if (len != delc.u.s_name.len || memcmp (news, dels, len))
13088 return true;
13089
13090 if (news[len] == 'n')
13091 {
13092 if (news[len + 1] == 'a')
13093 return dels[len] != 'd' || dels[len + 1] != 'a';
13094 if (news[len + 1] == 'w')
13095 return dels[len] != 'd' || dels[len + 1] != 'l';
13096 }
13097 return false;
13098 }
13099
13100 case DEMANGLE_COMPONENT_OPERATOR:
13101 /* Operator mismatches are handled above. */
13102 return false;
13103
13104 case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13105 if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13106 return true;
13107 return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13108 *delc.u.s_extended_operator.name);
13109
13110 case DEMANGLE_COMPONENT_FIXED_TYPE:
13111 if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13112 || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13113 return true;
13114 return new_delete_mismatch_p (*newc.u.s_fixed.length,
13115 *delc.u.s_fixed.length);
13116
13117 case DEMANGLE_COMPONENT_CTOR:
13118 if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13119 return true;
13120 return new_delete_mismatch_p (*newc.u.s_ctor.name,
13121 *delc.u.s_ctor.name);
13122
13123 case DEMANGLE_COMPONENT_DTOR:
13124 if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13125 return true;
13126 return new_delete_mismatch_p (*newc.u.s_dtor.name,
13127 *delc.u.s_dtor.name);
13128
13129 case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13130 {
13131 /* The demangler API provides no better way to compare built-in
13132 types except by comparing their demangled names. */
13133 size_t nsz, dsz;
13134 demangle_component *pnc = const_cast<demangle_component *>(&newc);
13135 demangle_component *pdc = const_cast<demangle_component *>(&delc);
13136 char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13137 char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13138 if (!nts != !dts)
13139 return true;
13140 bool mismatch = strcmp (nts, dts);
13141 free (nts);
13142 free (dts);
13143 return mismatch;
13144 }
13145
13146 case DEMANGLE_COMPONENT_SUB_STD:
13147 if (newc.u.s_string.len != delc.u.s_string.len)
13148 return true;
13149 return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13150 newc.u.s_string.len);
13151
13152 case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13153 case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13154 return newc.u.s_number.number != delc.u.s_number.number;
13155
13156 case DEMANGLE_COMPONENT_CHARACTER:
13157 return newc.u.s_character.character != delc.u.s_character.character;
13158
13159 case DEMANGLE_COMPONENT_DEFAULT_ARG:
13160 case DEMANGLE_COMPONENT_LAMBDA:
13161 if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13162 return true;
13163 return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13164 *delc.u.s_unary_num.sub);
13165 default:
13166 break;
13167 }
13168
13169 if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13170 return true;
13171
13172 if (!newc.u.s_binary.left)
13173 return false;
13174
13175 if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13176 || !newc.u.s_binary.right != !delc.u.s_binary.right)
13177 return true;
13178
13179 if (newc.u.s_binary.right)
13180 return new_delete_mismatch_p (*newc.u.s_binary.right,
13181 *delc.u.s_binary.right);
13182 return false;
13183 }
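
/* For example, a pointer returned from a class's member operator new[]
   (mangled with the "na" operator code) must be released by the same
   class's operator delete[] ("da"); releasing it with the scalar
   operator delete ("dl") is reported as a mismatch by the NAME case
   above.  */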
13184
13185 /* Return true if DELETE_DECL is an operator delete that's not suitable
13186 to call with a pointer returned from NEW_DECL. */
13187
13188 static bool
13189 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13190 {
13191 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13192 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13193
13194 /* valid_new_delete_pair_p() returns a conservative result (currently
13195 it only handles global operators). A true result is reliable but
13196 a false result doesn't necessarily mean the operators don't match. */
13197 if (valid_new_delete_pair_p (new_name, delete_name))
13198 return false;
13199
13200 /* For anything not handled by valid_new_delete_pair_p() such as member
13201 operators compare the individual demangled components of the mangled
13202 name. */
13203 const char *new_str = IDENTIFIER_POINTER (new_name);
13204 const char *del_str = IDENTIFIER_POINTER (delete_name);
13205
13206 void *np = NULL, *dp = NULL;
13207 demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13208 demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13209 bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13210 free (np);
13211 free (dp);
13212 return mismatch;
13213 }
13214
13215 /* ALLOC_DECL and DEALLOC_DECL are a pair of allocation and deallocation
13216 functions. Return true if the latter is suitable to deallocate objects
13217 allocated by calls to the former. */
13218
13219 static bool
13220 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13221 {
13222 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13223 a built-in deallocator. */
13224 enum class alloc_kind_t { none, builtin, user }
13225 alloc_dealloc_kind = alloc_kind_t::none;
13226
13227 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13228 {
13229 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13230 /* Return true iff both functions are of the same array or
13231 singleton form and false otherwise. */
13232 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13233
13234 /* Return false for deallocation functions that are known not
13235 to match. */
13236 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13237 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13238 return false;
13239 /* Otherwise proceed below to check the deallocation function's
13240 "*dealloc" attributes to look for one that mentions this operator
13241 new. */
13242 }
13243 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13244 {
13245 switch (DECL_FUNCTION_CODE (alloc_decl))
13246 {
13247 case BUILT_IN_ALLOCA:
13248 case BUILT_IN_ALLOCA_WITH_ALIGN:
13249 return false;
13250
13251 case BUILT_IN_ALIGNED_ALLOC:
13252 case BUILT_IN_CALLOC:
13253 case BUILT_IN_GOMP_ALLOC:
13254 case BUILT_IN_MALLOC:
13255 case BUILT_IN_REALLOC:
13256 case BUILT_IN_STRDUP:
13257 case BUILT_IN_STRNDUP:
13258 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13259 return false;
13260
13261 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13262 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13263 return true;
13264
13265 alloc_dealloc_kind = alloc_kind_t::builtin;
13266 break;
13267
13268 default:
13269 break;
13270 }
13271 }
13272
13273 /* Set if DEALLOC_DECL both allocates and deallocates. */
13274 alloc_kind_t realloc_kind = alloc_kind_t::none;
13275
13276 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13277 {
13278 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13279 if (dealloc_code == BUILT_IN_REALLOC)
13280 realloc_kind = alloc_kind_t::builtin;
13281
13282 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13283 (amats = lookup_attribute ("malloc", amats));
13284 amats = TREE_CHAIN (amats))
13285 {
13286 tree args = TREE_VALUE (amats);
13287 if (!args)
13288 continue;
13289
13290 tree fndecl = TREE_VALUE (args);
13291 if (!fndecl || !DECL_P (fndecl))
13292 continue;
13293
13294 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13295 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13296 return true;
13297 }
13298 }
13299
13300 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13301 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13302
13303 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13304 of its associated allocation functions for ALLOC_DECL.
13305 If the corresponding ALLOC_DECL is found they're a matching pair,
13306 otherwise they're not.
13307 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13308 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13309 (ddats = lookup_attribute ("*dealloc", ddats));
13310 ddats = TREE_CHAIN (ddats))
13311 {
13312 tree args = TREE_VALUE (ddats);
13313 if (!args)
13314 continue;
13315
13316 tree alloc = TREE_VALUE (args);
13317 if (!alloc)
13318 continue;
13319
13320 if (alloc == DECL_NAME (dealloc_decl))
13321 realloc_kind = alloc_kind_t::user;
13322
13323 if (DECL_P (alloc))
13324 {
13325 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13326
13327 switch (DECL_FUNCTION_CODE (alloc))
13328 {
13329 case BUILT_IN_ALIGNED_ALLOC:
13330 case BUILT_IN_CALLOC:
13331 case BUILT_IN_GOMP_ALLOC:
13332 case BUILT_IN_MALLOC:
13333 case BUILT_IN_REALLOC:
13334 case BUILT_IN_STRDUP:
13335 case BUILT_IN_STRNDUP:
13336 realloc_dealloc_kind = alloc_kind_t::builtin;
13337 break;
13338 default:
13339 break;
13340 }
13341
13342 if (!alloc_builtin)
13343 continue;
13344
13345 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13346 continue;
13347
13348 return true;
13349 }
13350
13351 if (alloc == DECL_NAME (alloc_decl))
13352 return true;
13353 }
13354
13355 if (realloc_kind == alloc_kind_t::none)
13356 return false;
13357
13358 hash_set<tree> common_deallocs;
13359 /* Special handling for deallocators. Iterate over both the allocator's
13360 and the reallocator's associated deallocator functions looking for
13361 the first one in common. If one is found, the de/reallocator is
13362 a match for the allocator even though the latter isn't directly
13363 associated with the former. This simplifies declarations in system
13364 headers.
13365 With AMATS set to the Allocator's Malloc ATtributes,
13366 and RMATS set to Reallocator's Malloc ATtributes... */
13367 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13368 rmats = DECL_ATTRIBUTES (dealloc_decl);
13369 (amats = lookup_attribute ("malloc", amats))
13370 || (rmats = lookup_attribute ("malloc", rmats));
13371 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13372 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13373 {
13374 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13375 if (tree adealloc = TREE_VALUE (args))
13376 {
13377 if (DECL_P (adealloc)
13378 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13379 {
13380 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13381 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13382 {
13383 if (realloc_kind == alloc_kind_t::builtin)
13384 return true;
13385 alloc_dealloc_kind = alloc_kind_t::builtin;
13386 }
13387 continue;
13388 }
13389
13390 common_deallocs.add (adealloc);
13391 }
13392
13393 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13394 if (tree ddealloc = TREE_VALUE (args))
13395 {
13396 if (DECL_P (ddealloc)
13397 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13398 {
13399 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13400 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13401 {
13402 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13403 return true;
13404 realloc_dealloc_kind = alloc_kind_t::builtin;
13405 }
13406 continue;
13407 }
13408
13409 if (common_deallocs.add (ddealloc))
13410 return true;
13411 }
13412 }
13413
13414 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13415 a built-in deallocator. */
13416 return (alloc_dealloc_kind == alloc_kind_t::builtin
13417 && realloc_dealloc_kind == alloc_kind_t::builtin);
13418 }
13419
13420 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13421 objects allocated by the ALLOC call. */
13422
13423 static bool
13424 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13425 {
13426 tree alloc_decl = gimple_call_fndecl (alloc);
13427 if (!alloc_decl)
13428 return true;
13429
13430 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13431 }
13432
13433 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13434 includes a nonzero offset. Such a pointer cannot refer to the beginning
13435 of an allocated object. A negative offset may refer to it only if
13436 the target pointer is unknown. */
13437
13438 static bool
13439 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13440 {
13441 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13442 return false;
13443
13444 tree dealloc_decl = get_callee_fndecl (exp);
13445 if (!dealloc_decl)
13446 return false;
13447
13448 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13449 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13450 {
13451 /* A call to a user-defined operator delete with a pointer plus offset
13452 may be valid if it's returned from an unknown function (i.e., one
13453 that's not operator new). */
13454 if (TREE_CODE (aref.ref) == SSA_NAME)
13455 {
13456 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13457 if (is_gimple_call (def_stmt))
13458 {
13459 tree alloc_decl = gimple_call_fndecl (def_stmt);
13460 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13461 return false;
13462 }
13463 }
13464 }
13465
13466 char offstr[80];
13467 offstr[0] = '\0';
13468 if (wi::fits_shwi_p (aref.offrng[0]))
13469 {
13470 if (aref.offrng[0] == aref.offrng[1]
13471 || !wi::fits_shwi_p (aref.offrng[1]))
13472 sprintf (offstr, " %lli",
13473 (long long)aref.offrng[0].to_shwi ());
13474 else
13475 sprintf (offstr, " [%lli, %lli]",
13476 (long long)aref.offrng[0].to_shwi (),
13477 (long long)aref.offrng[1].to_shwi ());
13478 }
13479
13480 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13481 "%K%qD called on pointer %qE with nonzero offset%s",
13482 exp, dealloc_decl, aref.ref, offstr))
13483 return false;
13484
13485 if (DECL_P (aref.ref))
13486 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13487 else if (TREE_CODE (aref.ref) == SSA_NAME)
13488 {
13489 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13490 if (is_gimple_call (def_stmt))
13491 {
13492 location_t def_loc = gimple_location (def_stmt);
13493 tree alloc_decl = gimple_call_fndecl (def_stmt);
13494 if (alloc_decl)
13495 inform (def_loc,
13496 "returned from %qD", alloc_decl);
13497 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13498 inform (def_loc,
13499 "returned from %qT", alloc_fntype);
13500 else
13501 inform (def_loc, "obtained here");
13502 }
13503 }
13504
13505 return true;
13506 }
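
/* For example,
     char *p = (char *) malloc (8);
     free (p + 4);
   is diagnosed with -Wfree-nonheap-object because P + 4 cannot point
   to the beginning of the allocated object.  */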
13507
13508 /* Issue a warning if a deallocation function such as free, realloc,
13509 or C++ operator delete is called with an argument not returned by
13510 a matching allocation function such as malloc or the corresponding
13511 form of C++ operator new. */
13512
13513 void
13514 maybe_emit_free_warning (tree exp)
13515 {
13516 tree fndecl = get_callee_fndecl (exp);
13517 if (!fndecl)
13518 return;
13519
13520 unsigned argno = call_dealloc_argno (exp);
13521 if ((unsigned) call_expr_nargs (exp) <= argno)
13522 return;
13523
13524 tree ptr = CALL_EXPR_ARG (exp, argno);
13525 if (integer_zerop (ptr))
13526 return;
13527
13528 access_ref aref;
13529 if (!compute_objsize (ptr, 0, &aref))
13530 return;
13531
13532 tree ref = aref.ref;
13533 if (integer_zerop (ref))
13534 return;
13535
13536 tree dealloc_decl = get_callee_fndecl (exp);
13537 location_t loc = tree_inlined_location (exp);
13538
13539 if (DECL_P (ref) || EXPR_P (ref))
13540 {
13541 /* Diagnose freeing a declared object. */
13542 if (aref.ref_declared ()
13543 && warning_at (loc, OPT_Wfree_nonheap_object,
13544 "%K%qD called on unallocated object %qD",
13545 exp, dealloc_decl, ref))
13546 {
13547 loc = (DECL_P (ref)
13548 ? DECL_SOURCE_LOCATION (ref)
13549 : EXPR_LOCATION (ref));
13550 inform (loc, "declared here");
13551 return;
13552 }
13553
13554 /* Diagnose freeing a pointer that includes a positive offset.
13555 Such a pointer cannot refer to the beginning of an allocated
13556 object. A negative offset may refer to it. */
13557 if (aref.sizrng[0] != aref.sizrng[1]
13558 && warn_dealloc_offset (loc, exp, aref))
13559 return;
13560 }
13561 else if (CONSTANT_CLASS_P (ref))
13562 {
13563 if (warning_at (loc, OPT_Wfree_nonheap_object,
13564 "%K%qD called on a pointer to an unallocated "
13565 "object %qE", exp, dealloc_decl, ref))
13566 {
13567 if (TREE_CODE (ptr) == SSA_NAME)
13568 {
13569 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13570 if (is_gimple_assign (def_stmt))
13571 {
13572 location_t loc = gimple_location (def_stmt);
13573 inform (loc, "assigned here");
13574 }
13575 }
13576 return;
13577 }
13578 }
13579 else if (TREE_CODE (ref) == SSA_NAME)
13580 {
13581 /* Also warn if the pointer argument refers to the result
13582 of an allocation call like alloca or VLA. */
13583 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13584 if (is_gimple_call (def_stmt))
13585 {
13586 bool warned = false;
13587 if (gimple_call_alloc_p (def_stmt))
13588 {
13589 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13590 {
13591 if (warn_dealloc_offset (loc, exp, aref))
13592 return;
13593 }
13594 else
13595 {
13596 tree alloc_decl = gimple_call_fndecl (def_stmt);
13597 int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13598 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13599 ? OPT_Wmismatched_new_delete
13600 : OPT_Wmismatched_dealloc);
13601 warned = warning_at (loc, opt,
13602 "%K%qD called on pointer returned "
13603 "from a mismatched allocation "
13604 "function", exp, dealloc_decl);
13605 }
13606 }
13607 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13608 || gimple_call_builtin_p (def_stmt,
13609 BUILT_IN_ALLOCA_WITH_ALIGN))
13610 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13611 "%K%qD called on pointer to "
13612 "an unallocated object",
13613 exp, dealloc_decl);
13614 else if (warn_dealloc_offset (loc, exp, aref))
13615 return;
13616
13617 if (warned)
13618 {
13619 tree fndecl = gimple_call_fndecl (def_stmt);
13620 inform (gimple_location (def_stmt),
13621 "returned from %qD", fndecl);
13622 return;
13623 }
13624 }
13625 else if (gimple_nop_p (def_stmt))
13626 {
13627 ref = SSA_NAME_VAR (ref);
13628 /* Diagnose freeing a pointer that includes a positive offset. */
13629 if (TREE_CODE (ref) == PARM_DECL
13630 && !aref.deref
13631 && aref.sizrng[0] != aref.sizrng[1]
13632 && aref.offrng[0] > 0 && aref.offrng[1] > 0
13633 && warn_dealloc_offset (loc, exp, aref))
13634 return;
13635 }
13636 }
13637 }
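
/* For example, passing a pointer returned from operator new to free,
   or one returned from malloc to operator delete, is diagnosed above
   with -Wmismatched-new-delete, while a mismatch between two
   user-declared allocation/deallocation functions associated via
   attribute malloc is diagnosed with -Wmismatched-dealloc.  */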
13638
13639 /* Fold a call to __builtin_object_size with arguments PTR and OST,
13640 if possible. */
13641
13642 static tree
13643 fold_builtin_object_size (tree ptr, tree ost)
13644 {
13645 unsigned HOST_WIDE_INT bytes;
13646 int object_size_type;
13647
13648 if (!validate_arg (ptr, POINTER_TYPE)
13649 || !validate_arg (ost, INTEGER_TYPE))
13650 return NULL_TREE;
13651
13652 STRIP_NOPS (ost);
13653
13654 if (TREE_CODE (ost) != INTEGER_CST
13655 || tree_int_cst_sgn (ost) < 0
13656 || compare_tree_int (ost, 3) > 0)
13657 return NULL_TREE;
13658
13659 object_size_type = tree_to_shwi (ost);
13660
13661 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
13662 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
13663 and (size_t) 0 for types 2 and 3. */
13664 if (TREE_SIDE_EFFECTS (ptr))
13665 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
13666
13667 if (TREE_CODE (ptr) == ADDR_EXPR)
13668 {
13669 compute_builtin_object_size (ptr, object_size_type, &bytes);
13670 if (wi::fits_to_tree_p (bytes, size_type_node))
13671 return build_int_cstu (size_type_node, bytes);
13672 }
13673 else if (TREE_CODE (ptr) == SSA_NAME)
13674 {
13675 /* If object size is not known yet, delay folding until
13676 later. Maybe subsequent passes will help determining
13677 it. */
13678 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
13679 && wi::fits_to_tree_p (bytes, size_type_node))
13680 return build_int_cstu (size_type_node, bytes);
13681 }
13682
13683 return NULL_TREE;
13684 }
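
/* For example, with char buf[16], __builtin_object_size (&buf[2], 0)
   folds to 14 here, whereas a pointer whose target cannot yet be
   determined is left unfolded so that later passes may still resolve
   it.  */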
13685
13686 /* Builtins with folding operations that operate on "..." arguments
13687 need special handling; we need to store the arguments in a convenient
13688 data structure before attempting any folding. Fortunately there are
13689 only a few builtins that fall into this category. FNDECL is the
13690 function, EXP is the CALL_EXPR for the call. */
13691
13692 static tree
13693 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
13694 {
13695 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13696 tree ret = NULL_TREE;
13697
13698 switch (fcode)
13699 {
13700 case BUILT_IN_FPCLASSIFY:
13701 ret = fold_builtin_fpclassify (loc, args, nargs);
13702 break;
13703
13704 default:
13705 break;
13706 }
13707 if (ret)
13708 {
13709 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13710 SET_EXPR_LOCATION (ret, loc);
13711 TREE_NO_WARNING (ret) = 1;
13712 return ret;
13713 }
13714 return NULL_TREE;
13715 }
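
/* Illustrative sketch, not part of GCC: with a constant classified
   operand, e.g.

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)

   the folder ends up selecting the FP_NORMAL operand, and the result is
   wrapped in a NOP_EXPR above so a location and TREE_NO_WARNING can be
   attached to it.  */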
13716
13717 /* Initialize format string characters in the target charset. */
13718
13719 bool
13720 init_target_chars (void)
13721 {
13722 static bool init;
13723 if (!init)
13724 {
13725 target_newline = lang_hooks.to_target_charset ('\n');
13726 target_percent = lang_hooks.to_target_charset ('%');
13727 target_c = lang_hooks.to_target_charset ('c');
13728 target_s = lang_hooks.to_target_charset ('s');
13729 if (target_newline == 0 || target_percent == 0 || target_c == 0
13730 || target_s == 0)
13731 return false;
13732
13733 target_percent_c[0] = target_percent;
13734 target_percent_c[1] = target_c;
13735 target_percent_c[2] = '\0';
13736
13737 target_percent_s[0] = target_percent;
13738 target_percent_s[1] = target_s;
13739 target_percent_s[2] = '\0';
13740
13741 target_percent_s_newline[0] = target_percent;
13742 target_percent_s_newline[1] = target_s;
13743 target_percent_s_newline[2] = target_newline;
13744 target_percent_s_newline[3] = '\0';
13745
13746 init = true;
13747 }
13748 return true;
13749 }
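
/* Illustrative sketch, not part of GCC: the cached characters and the
   "%c" / "%s" / "%s\n" strings above let format-string folders compare
   formats in the target character set, so that for example

     printf ("%s\n", s);

   can be rewritten as

     puts (s);

   even when the host and target character sets differ.  */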
13750
13751 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13752 and that no overflow/underflow occurred. INEXACT is true if M was
13753 not calculated exactly. TYPE is the tree type for the result.
13754 This function assumes the caller cleared the MPFR flags and then
13755 calculated M, so that any flag set on entry reflects that
13756 calculation. Return NULL_TREE if any check fails. */
13757
13758 static tree
13759 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13760 {
13761 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13762 overflow/underflow occurred. If -frounding-math, proceed iff M
13763 was computed exactly. */
13764 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13765 && (!flag_rounding_math || !inexact))
13766 {
13767 REAL_VALUE_TYPE rr;
13768
13769 real_from_mpfr (&rr, m, type, MPFR_RNDN);
13770 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13771 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13772 but the mpfr_t is not, then we underflowed in the
13773 conversion. */
13774 if (real_isfinite (&rr)
13775 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13776 {
13777 REAL_VALUE_TYPE rmode;
13778
13779 real_convert (&rmode, TYPE_MODE (type), &rr);
13780 /* Proceed iff the specified mode can hold the value. */
13781 if (real_identical (&rmode, &rr))
13782 return build_real (type, rmode);
13783 }
13784 }
13785 return NULL_TREE;
13786 }
13787
13788 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13789 number and that no overflow/underflow occurred. INEXACT is true
13790 if M was not calculated exactly. TYPE is the tree type for the
13791 result. This function assumes the caller cleared the MPFR flags
13792 and then calculated M, so that any flag set on entry reflects that
13793 calculation. Return NULL_TREE if any check fails; if FORCE_CONVERT
13794 is true, bypass the checks. */
13795
13796 static tree
13797 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13798 {
13799 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13800 overflow/underflow occurred. If -frounding-math, proceed iff M
13801 was computed exactly. */
13802 if (force_convert
13803 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13804 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13805 && (!flag_rounding_math || !inexact)))
13806 {
13807 REAL_VALUE_TYPE re, im;
13808
13809 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
13810 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
13811 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13812 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13813 but the mpfr_t is not, then we underflowed in the
13814 conversion. */
13815 if (force_convert
13816 || (real_isfinite (&re) && real_isfinite (&im)
13817 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13818 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13819 {
13820 REAL_VALUE_TYPE re_mode, im_mode;
13821
13822 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13823 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13824 /* Proceed iff the specified mode can hold the value. */
13825 if (force_convert
13826 || (real_identical (&re_mode, &re)
13827 && real_identical (&im_mode, &im)))
13828 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13829 build_real (TREE_TYPE (type), im_mode));
13830 }
13831 }
13832 return NULL_TREE;
13833 }
13834
13835 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13836 the value pointed to by ARG_QUO and return the remainder. The type
13837 is taken from the type of ARG0 and is used to set the precision of
13838 the calculation and of the results. */
13839
13840 static tree
13841 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13842 {
13843 tree const type = TREE_TYPE (arg0);
13844 tree result = NULL_TREE;
13845
13846 STRIP_NOPS (arg0);
13847 STRIP_NOPS (arg1);
13848
13849 /* To proceed, MPFR must exactly represent the target floating point
13850 format, which only happens when the target base equals two. */
13851 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13852 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13853 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13854 {
13855 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13856 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13857
13858 if (real_isfinite (ra0) && real_isfinite (ra1))
13859 {
13860 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13861 const int prec = fmt->p;
13862 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
13863 tree result_rem;
13864 long integer_quo;
13865 mpfr_t m0, m1;
13866
13867 mpfr_inits2 (prec, m0, m1, NULL);
13868 mpfr_from_real (m0, ra0, MPFR_RNDN);
13869 mpfr_from_real (m1, ra1, MPFR_RNDN);
13870 mpfr_clear_flags ();
13871 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13872 /* Remquo is independent of the rounding mode, so pass
13873 inexact=0 to do_mpfr_ckconv(). */
13874 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13875 mpfr_clears (m0, m1, NULL);
13876 if (result_rem)
13877 {
13878 /* MPFR calculates quo in the host's long so it may
13879 return more bits in quo than the target int can hold
13880 if sizeof(host long) > sizeof(target int). This can
13881 happen even for native compilers in LP64 mode. In
13882 these cases, reduce the quo value modulo the largest
13883 number that the target int can hold, leaving one
13884 bit for the sign. */
13885 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13886 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13887
13888 /* Dereference the quo pointer argument. */
13889 arg_quo = build_fold_indirect_ref (arg_quo);
13890 /* Proceed iff a valid pointer type was passed in. */
13891 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13892 {
13893 /* Set the value. */
13894 tree result_quo
13895 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13896 build_int_cst (TREE_TYPE (arg_quo),
13897 integer_quo));
13898 TREE_SIDE_EFFECTS (result_quo) = 1;
13899 /* Combine the quo assignment with the rem. */
13900 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13901 result_quo, result_rem));
13902 }
13903 }
13904 }
13905 }
13906 return result;
13907 }
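
/* Worked example, for illustration only: with constant arguments

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   mpfr_remquo gives a remainder of -1.0 and a quotient of 2 (5.0/3.0
   rounded to the nearest integer), so the call folds to a COMPOUND_EXPR
   that assigns 2 to q and yields -1.0.  */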
13908
13909 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13910 resulting value as a tree with type TYPE. The mpfr precision is
13911 set to the precision of TYPE. We assume that this mpfr function
13912 returns zero if the result could be calculated exactly within the
13913 requested precision. In addition, the integer pointer represented
13914 by ARG_SG will be dereferenced and set to the appropriate signgam
13915 (-1,1) value. */
13916
13917 static tree
13918 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13919 {
13920 tree result = NULL_TREE;
13921
13922 STRIP_NOPS (arg);
13923
13924 /* To proceed, MPFR must exactly represent the target floating point
13925 format, which only happens when the target base equals two. Also
13926 verify ARG is a constant and that ARG_SG is an int pointer. */
13927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13928 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13929 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13930 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13931 {
13932 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13933
13934 /* In addition to NaN and Inf, the argument cannot be zero or a
13935 negative integer. */
13936 if (real_isfinite (ra)
13937 && ra->cl != rvc_zero
13938 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13939 {
13940 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13941 const int prec = fmt->p;
13942 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
13943 int inexact, sg;
13944 mpfr_t m;
13945 tree result_lg;
13946
13947 mpfr_init2 (m, prec);
13948 mpfr_from_real (m, ra, MPFR_RNDN);
13949 mpfr_clear_flags ();
13950 inexact = mpfr_lgamma (m, &sg, m, rnd);
13951 result_lg = do_mpfr_ckconv (m, type, inexact);
13952 mpfr_clear (m);
13953 if (result_lg)
13954 {
13955 tree result_sg;
13956
13957 /* Dereference the arg_sg pointer argument. */
13958 arg_sg = build_fold_indirect_ref (arg_sg);
13959 /* Assign the signgam value into *arg_sg. */
13960 result_sg = fold_build2 (MODIFY_EXPR,
13961 TREE_TYPE (arg_sg), arg_sg,
13962 build_int_cst (TREE_TYPE (arg_sg), sg));
13963 TREE_SIDE_EFFECTS (result_sg) = 1;
13964 /* Combine the signgam assignment with the lgamma result. */
13965 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13966 result_sg, result_lg));
13967 }
13968 }
13969 }
13970
13971 return result;
13972 }
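
/* Worked example, for illustration only (absent -frounding-math): with

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);

   the call folds to a COMPOUND_EXPR assigning 1 to sg (gamma(0.5) is
   the positive value sqrt(pi)) and yielding approximately 0.5723649,
   i.e. log(sqrt(pi)) rounded to double precision.  */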
13973
13974 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13975 mpc function FUNC on it and return the resulting value as a tree
13976 with type TYPE. The mpfr precision is set to the precision of
13977 TYPE. We assume that function FUNC returns zero if the result
13978 could be calculated exactly within the requested precision. If
13979 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13980 in the arguments and/or results. */
13981
13982 tree
13983 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13984 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13985 {
13986 tree result = NULL_TREE;
13987
13988 STRIP_NOPS (arg0);
13989 STRIP_NOPS (arg1);
13990
13991 /* To proceed, MPFR must exactly represent the target floating point
13992 format, which only happens when the target base equals two. */
13993 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13995 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13996 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13997 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13998 {
13999 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14000 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14001 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14002 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14003
14004 if (do_nonfinite
14005 || (real_isfinite (re0) && real_isfinite (im0)
14006 && real_isfinite (re1) && real_isfinite (im1)))
14007 {
14008 const struct real_format *const fmt =
14009 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14010 const int prec = fmt->p;
14011 const mpfr_rnd_t rnd = fmt->round_towards_zero
14012 ? MPFR_RNDZ : MPFR_RNDN;
14013 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14014 int inexact;
14015 mpc_t m0, m1;
14016
14017 mpc_init2 (m0, prec);
14018 mpc_init2 (m1, prec);
14019 mpfr_from_real (mpc_realref (m0), re0, rnd);
14020 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14021 mpfr_from_real (mpc_realref (m1), re1, rnd);
14022 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14023 mpfr_clear_flags ();
14024 inexact = func (m0, m0, m1, crnd);
14025 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14026 mpc_clear (m0);
14027 mpc_clear (m1);
14028 }
14029 }
14030
14031 return result;
14032 }
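
/* Worked example, for illustration only: do_mpc_arg2 backs folds such
   as __builtin_cpow with constant complex operands, e.g.

     _Complex double z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);

   where FUNC is mpc_pow; since (1+i)*(1+i) == 2i, the call should fold
   to the complex constant 0.0 + 2.0i.  */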
14033
14034 /* A wrapper function for builtin folding that prevents warnings for
14035 "statement without effect" and the like, caused by the call node
14036 being removed before the warning is generated. */
14037
14038 tree
14039 fold_call_stmt (gcall *stmt, bool ignore)
14040 {
14041 tree ret = NULL_TREE;
14042 tree fndecl = gimple_call_fndecl (stmt);
14043 location_t loc = gimple_location (stmt);
14044 if (fndecl && fndecl_built_in_p (fndecl)
14045 && !gimple_call_va_arg_pack_p (stmt))
14046 {
14047 int nargs = gimple_call_num_args (stmt);
14048 tree *args = (nargs > 0
14049 ? gimple_call_arg_ptr (stmt, 0)
14050 : &error_mark_node);
14051
14052 if (avoid_folding_inline_builtin (fndecl))
14053 return NULL_TREE;
14054 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14055 {
14056 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14057 }
14058 else
14059 {
14060 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
14061 if (ret)
14062 {
14063 /* Propagate location information from original call to
14064 expansion of builtin. Otherwise things like
14065 maybe_emit_chk_warning, that operate on the expansion
14066 of a builtin, will use the wrong location information. */
14067 if (gimple_has_location (stmt))
14068 {
14069 tree realret = ret;
14070 if (TREE_CODE (ret) == NOP_EXPR)
14071 realret = TREE_OPERAND (ret, 0);
14072 if (CAN_HAVE_LOCATION_P (realret)
14073 && !EXPR_HAS_LOCATION (realret))
14074 SET_EXPR_LOCATION (realret, loc);
14075 return realret;
14076 }
14077 return ret;
14078 }
14079 }
14080 }
14081 return NULL_TREE;
14082 }
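
/* Illustrative sketch, not part of GCC: the scenario described above is
   a statement such as

     (void) __builtin_strlen ("abc");

   whose folded form (the constant 3) has no effect on its own; going
   through this wrapper keeps such a replacement from provoking a
   spurious warning once the original call node is gone.  */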
14083
14084 /* Look up the function in builtin_decl that corresponds to DECL
14085 and set ASMSPEC as its user assembler name. DECL must be a
14086 function decl that declares a builtin. */
14087
14088 void
14089 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14090 {
14091 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
14092 && asmspec != 0);
14093
14094 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14095 set_user_assembler_name (builtin, asmspec);
14096
14097 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
14098 && INT_TYPE_SIZE < BITS_PER_WORD)
14099 {
14100 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
14101 set_user_assembler_libfunc ("ffs", asmspec);
14102 set_optab_libfunc (ffs_optab, mode, "ffs");
14103 }
14104 }
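
/* Illustrative sketch, not part of GCC: this hook is reached when a
   source-level declaration renames a builtin, e.g.

     extern int ffs (int) __asm__ ("__my_ffs");

   so that both direct calls and any ffs_optab libcalls emitted for
   targets with INT_TYPE_SIZE < BITS_PER_WORD reference the user-chosen
   assembler name.  */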
14105
14106 /* Return true if DECL is a builtin that expands to a constant or similarly
14107 simple code. */
14108 bool
14109 is_simple_builtin (tree decl)
14110 {
14111 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
14112 switch (DECL_FUNCTION_CODE (decl))
14113 {
14114 /* Builtins that expand to constants. */
14115 case BUILT_IN_CONSTANT_P:
14116 case BUILT_IN_EXPECT:
14117 case BUILT_IN_OBJECT_SIZE:
14118 case BUILT_IN_UNREACHABLE:
14119 /* Simple register moves or loads from stack. */
14120 case BUILT_IN_ASSUME_ALIGNED:
14121 case BUILT_IN_RETURN_ADDRESS:
14122 case BUILT_IN_EXTRACT_RETURN_ADDR:
14123 case BUILT_IN_FROB_RETURN_ADDR:
14124 case BUILT_IN_RETURN:
14125 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14126 case BUILT_IN_FRAME_ADDRESS:
14127 case BUILT_IN_VA_END:
14128 case BUILT_IN_STACK_SAVE:
14129 case BUILT_IN_STACK_RESTORE:
14130 /* Exception state returns or moves registers around. */
14131 case BUILT_IN_EH_FILTER:
14132 case BUILT_IN_EH_POINTER:
14133 case BUILT_IN_EH_COPY_VALUES:
14134 return true;
14135
14136 default:
14137 return false;
14138 }
14139
14140 return false;
14141 }
14142
14143 /* Return true if DECL is a builtin that is not expensive, i.e., one
14144 that is most probably expanded inline into reasonably simple code.
14145 This is a superset of is_simple_builtin. */
14146 bool
14147 is_inexpensive_builtin (tree decl)
14148 {
14149 if (!decl)
14150 return false;
14151 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14152 return true;
14153 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14154 switch (DECL_FUNCTION_CODE (decl))
14155 {
14156 case BUILT_IN_ABS:
14157 CASE_BUILT_IN_ALLOCA:
14158 case BUILT_IN_BSWAP16:
14159 case BUILT_IN_BSWAP32:
14160 case BUILT_IN_BSWAP64:
14161 case BUILT_IN_BSWAP128:
14162 case BUILT_IN_CLZ:
14163 case BUILT_IN_CLZIMAX:
14164 case BUILT_IN_CLZL:
14165 case BUILT_IN_CLZLL:
14166 case BUILT_IN_CTZ:
14167 case BUILT_IN_CTZIMAX:
14168 case BUILT_IN_CTZL:
14169 case BUILT_IN_CTZLL:
14170 case BUILT_IN_FFS:
14171 case BUILT_IN_FFSIMAX:
14172 case BUILT_IN_FFSL:
14173 case BUILT_IN_FFSLL:
14174 case BUILT_IN_IMAXABS:
14175 case BUILT_IN_FINITE:
14176 case BUILT_IN_FINITEF:
14177 case BUILT_IN_FINITEL:
14178 case BUILT_IN_FINITED32:
14179 case BUILT_IN_FINITED64:
14180 case BUILT_IN_FINITED128:
14181 case BUILT_IN_FPCLASSIFY:
14182 case BUILT_IN_ISFINITE:
14183 case BUILT_IN_ISINF_SIGN:
14184 case BUILT_IN_ISINF:
14185 case BUILT_IN_ISINFF:
14186 case BUILT_IN_ISINFL:
14187 case BUILT_IN_ISINFD32:
14188 case BUILT_IN_ISINFD64:
14189 case BUILT_IN_ISINFD128:
14190 case BUILT_IN_ISNAN:
14191 case BUILT_IN_ISNANF:
14192 case BUILT_IN_ISNANL:
14193 case BUILT_IN_ISNAND32:
14194 case BUILT_IN_ISNAND64:
14195 case BUILT_IN_ISNAND128:
14196 case BUILT_IN_ISNORMAL:
14197 case BUILT_IN_ISGREATER:
14198 case BUILT_IN_ISGREATEREQUAL:
14199 case BUILT_IN_ISLESS:
14200 case BUILT_IN_ISLESSEQUAL:
14201 case BUILT_IN_ISLESSGREATER:
14202 case BUILT_IN_ISUNORDERED:
14203 case BUILT_IN_VA_ARG_PACK:
14204 case BUILT_IN_VA_ARG_PACK_LEN:
14205 case BUILT_IN_VA_COPY:
14206 case BUILT_IN_TRAP:
14207 case BUILT_IN_SAVEREGS:
14208 case BUILT_IN_POPCOUNTL:
14209 case BUILT_IN_POPCOUNTLL:
14210 case BUILT_IN_POPCOUNTIMAX:
14211 case BUILT_IN_POPCOUNT:
14212 case BUILT_IN_PARITYL:
14213 case BUILT_IN_PARITYLL:
14214 case BUILT_IN_PARITYIMAX:
14215 case BUILT_IN_PARITY:
14216 case BUILT_IN_LABS:
14217 case BUILT_IN_LLABS:
14218 case BUILT_IN_PREFETCH:
14219 case BUILT_IN_ACC_ON_DEVICE:
14220 return true;
14221
14222 default:
14223 return is_simple_builtin (decl);
14224 }
14225
14226 return false;
14227 }
14228
14229 /* Return true if T is a constant and the value cast to a target char
14230 can be represented by a host char.
14231 Store the cast char constant in *P if so. */
14232
14233 bool
14234 target_char_cst_p (tree t, char *p)
14235 {
14236 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
14237 return false;
14238
14239 *p = (char)tree_to_uhwi (t);
14240 return true;
14241 }
14242
14243 /* Return true if the builtin DECL is implemented in a standard library.
14244 Otherwise return false, which does not guarantee that it is not
14245 (thus the list of handled builtins below may be incomplete). */
14246
14247 bool
14248 builtin_with_linkage_p (tree decl)
14249 {
14250 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14251 switch (DECL_FUNCTION_CODE (decl))
14252 {
14253 CASE_FLT_FN (BUILT_IN_ACOS):
14254 CASE_FLT_FN (BUILT_IN_ACOSH):
14255 CASE_FLT_FN (BUILT_IN_ASIN):
14256 CASE_FLT_FN (BUILT_IN_ASINH):
14257 CASE_FLT_FN (BUILT_IN_ATAN):
14258 CASE_FLT_FN (BUILT_IN_ATANH):
14259 CASE_FLT_FN (BUILT_IN_ATAN2):
14260 CASE_FLT_FN (BUILT_IN_CBRT):
14261 CASE_FLT_FN (BUILT_IN_CEIL):
14262 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
14263 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14264 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
14265 CASE_FLT_FN (BUILT_IN_COS):
14266 CASE_FLT_FN (BUILT_IN_COSH):
14267 CASE_FLT_FN (BUILT_IN_ERF):
14268 CASE_FLT_FN (BUILT_IN_ERFC):
14269 CASE_FLT_FN (BUILT_IN_EXP):
14270 CASE_FLT_FN (BUILT_IN_EXP2):
14271 CASE_FLT_FN (BUILT_IN_EXPM1):
14272 CASE_FLT_FN (BUILT_IN_FABS):
14273 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
14274 CASE_FLT_FN (BUILT_IN_FDIM):
14275 CASE_FLT_FN (BUILT_IN_FLOOR):
14276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
14277 CASE_FLT_FN (BUILT_IN_FMA):
14278 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
14279 CASE_FLT_FN (BUILT_IN_FMAX):
14280 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
14281 CASE_FLT_FN (BUILT_IN_FMIN):
14282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
14283 CASE_FLT_FN (BUILT_IN_FMOD):
14284 CASE_FLT_FN (BUILT_IN_FREXP):
14285 CASE_FLT_FN (BUILT_IN_HYPOT):
14286 CASE_FLT_FN (BUILT_IN_ILOGB):
14287 CASE_FLT_FN (BUILT_IN_LDEXP):
14288 CASE_FLT_FN (BUILT_IN_LGAMMA):
14289 CASE_FLT_FN (BUILT_IN_LLRINT):
14290 CASE_FLT_FN (BUILT_IN_LLROUND):
14291 CASE_FLT_FN (BUILT_IN_LOG):
14292 CASE_FLT_FN (BUILT_IN_LOG10):
14293 CASE_FLT_FN (BUILT_IN_LOG1P):
14294 CASE_FLT_FN (BUILT_IN_LOG2):
14295 CASE_FLT_FN (BUILT_IN_LOGB):
14296 CASE_FLT_FN (BUILT_IN_LRINT):
14297 CASE_FLT_FN (BUILT_IN_LROUND):
14298 CASE_FLT_FN (BUILT_IN_MODF):
14299 CASE_FLT_FN (BUILT_IN_NAN):
14300 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14301 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
14302 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
14303 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
14304 CASE_FLT_FN (BUILT_IN_POW):
14305 CASE_FLT_FN (BUILT_IN_REMAINDER):
14306 CASE_FLT_FN (BUILT_IN_REMQUO):
14307 CASE_FLT_FN (BUILT_IN_RINT):
14308 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
14309 CASE_FLT_FN (BUILT_IN_ROUND):
14310 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
14311 CASE_FLT_FN (BUILT_IN_SCALBLN):
14312 CASE_FLT_FN (BUILT_IN_SCALBN):
14313 CASE_FLT_FN (BUILT_IN_SIN):
14314 CASE_FLT_FN (BUILT_IN_SINH):
14315 CASE_FLT_FN (BUILT_IN_SINCOS):
14316 CASE_FLT_FN (BUILT_IN_SQRT):
14317 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
14318 CASE_FLT_FN (BUILT_IN_TAN):
14319 CASE_FLT_FN (BUILT_IN_TANH):
14320 CASE_FLT_FN (BUILT_IN_TGAMMA):
14321 CASE_FLT_FN (BUILT_IN_TRUNC):
14322 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
14323 return true;
14324 default:
14325 break;
14326 }
14327 return false;
14328 }
14329
14330 /* Return true if OFFRNG is bounded to a subrange of offset values
14331 valid for the largest possible object. */
14332
14333 bool
14334 access_ref::offset_bounded () const
14335 {
14336 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
14337 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
14338 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
14339 }
14340
14341 /* Return the fnspec describing the known side effects of builtin
14342 CALLEE (see attr-fnspec.h for the encoding), or an empty fnspec.
14343 See tree-ssa-structalias.c:find_func_aliases for builtins handled here. */
14344
14345 attr_fnspec
14346 builtin_fnspec (tree callee)
14347 {
14348 built_in_function code = DECL_FUNCTION_CODE (callee);
14349
14350 switch (code)
14351 {
14352 /* All the following functions read memory pointed to by their
14353 second argument and write memory pointed to by their first
14354 argument.
14355 strcat/strncat additionally read memory pointed to by the first
14356 argument. */
14357 case BUILT_IN_STRCAT:
14358 case BUILT_IN_STRCAT_CHK:
14359 return "1cW 1 ";
14360 case BUILT_IN_STRNCAT:
14361 case BUILT_IN_STRNCAT_CHK:
14362 return "1cW 13";
14363 case BUILT_IN_STRCPY:
14364 case BUILT_IN_STRCPY_CHK:
14365 return "1cO 1 ";
14366 case BUILT_IN_STPCPY:
14367 case BUILT_IN_STPCPY_CHK:
14368 return ".cO 1 ";
14369 case BUILT_IN_STRNCPY:
14370 case BUILT_IN_MEMCPY:
14371 case BUILT_IN_MEMMOVE:
14372 case BUILT_IN_TM_MEMCPY:
14373 case BUILT_IN_TM_MEMMOVE:
14374 case BUILT_IN_STRNCPY_CHK:
14375 case BUILT_IN_MEMCPY_CHK:
14376 case BUILT_IN_MEMMOVE_CHK:
14377 return "1cO313";
14378 case BUILT_IN_MEMPCPY:
14379 case BUILT_IN_MEMPCPY_CHK:
14380 return ".cO313";
14381 case BUILT_IN_STPNCPY:
14382 case BUILT_IN_STPNCPY_CHK:
14383 return ".cO313";
14384 case BUILT_IN_BCOPY:
14385 return ".c23O3";
14386 case BUILT_IN_BZERO:
14387 return ".cO2";
14388 case BUILT_IN_MEMCMP:
14389 case BUILT_IN_MEMCMP_EQ:
14390 case BUILT_IN_BCMP:
14391 case BUILT_IN_STRNCMP:
14392 case BUILT_IN_STRNCMP_EQ:
14393 case BUILT_IN_STRNCASECMP:
14394 return ".cR3R3";
14395
14396 /* The following functions read memory pointed to by their
14397 first argument. */
14398 CASE_BUILT_IN_TM_LOAD (1):
14399 CASE_BUILT_IN_TM_LOAD (2):
14400 CASE_BUILT_IN_TM_LOAD (4):
14401 CASE_BUILT_IN_TM_LOAD (8):
14402 CASE_BUILT_IN_TM_LOAD (FLOAT):
14403 CASE_BUILT_IN_TM_LOAD (DOUBLE):
14404 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
14405 CASE_BUILT_IN_TM_LOAD (M64):
14406 CASE_BUILT_IN_TM_LOAD (M128):
14407 CASE_BUILT_IN_TM_LOAD (M256):
14408 case BUILT_IN_TM_LOG:
14409 case BUILT_IN_TM_LOG_1:
14410 case BUILT_IN_TM_LOG_2:
14411 case BUILT_IN_TM_LOG_4:
14412 case BUILT_IN_TM_LOG_8:
14413 case BUILT_IN_TM_LOG_FLOAT:
14414 case BUILT_IN_TM_LOG_DOUBLE:
14415 case BUILT_IN_TM_LOG_LDOUBLE:
14416 case BUILT_IN_TM_LOG_M64:
14417 case BUILT_IN_TM_LOG_M128:
14418 case BUILT_IN_TM_LOG_M256:
14419 return ".cR ";
14420
14421 case BUILT_IN_INDEX:
14422 case BUILT_IN_RINDEX:
14423 case BUILT_IN_STRCHR:
14424 case BUILT_IN_STRLEN:
14425 case BUILT_IN_STRRCHR:
14426 return ".cR ";
14427 case BUILT_IN_STRNLEN:
14428 return ".cR2";
14429
14430 /* These read memory pointed to by the first argument.
14431 Allocating memory does not have any side-effects apart from
14432 being the definition point for the pointer.
14433 Unix98 specifies that errno is set on allocation failure. */
14434 case BUILT_IN_STRDUP:
14435 return "mCR ";
14436 case BUILT_IN_STRNDUP:
14437 return "mCR2";
14438 /* Allocating memory does not have any side-effects apart from
14439 being the definition point for the pointer. */
14440 case BUILT_IN_MALLOC:
14441 case BUILT_IN_ALIGNED_ALLOC:
14442 case BUILT_IN_CALLOC:
14443 case BUILT_IN_GOMP_ALLOC:
14444 return "mC";
14445 CASE_BUILT_IN_ALLOCA:
14446 return "mc";
14447 /* These read memory pointed to by the first argument with size
14448 in the third argument. */
14449 case BUILT_IN_MEMCHR:
14450 return ".cR3";
14451 /* These read memory pointed to by the first and second arguments. */
14452 case BUILT_IN_STRSTR:
14453 case BUILT_IN_STRPBRK:
14454 case BUILT_IN_STRCASECMP:
14455 case BUILT_IN_STRCSPN:
14456 case BUILT_IN_STRSPN:
14457 case BUILT_IN_STRCMP:
14458 case BUILT_IN_STRCMP_EQ:
14459 return ".cR R ";
14460 /* Freeing memory kills the pointed-to memory. More importantly,
14461 the call has to serve as a barrier against moving loads and stores
14462 across it. */
14463 case BUILT_IN_STACK_RESTORE:
14464 case BUILT_IN_FREE:
14465 case BUILT_IN_GOMP_FREE:
14466 return ".co ";
14467 case BUILT_IN_VA_END:
14468 return ".cO ";
14469 /* Realloc serves both as allocation point and deallocation point. */
14470 case BUILT_IN_REALLOC:
14471 return ".Cw ";
14472 case BUILT_IN_GAMMA_R:
14473 case BUILT_IN_GAMMAF_R:
14474 case BUILT_IN_GAMMAL_R:
14475 case BUILT_IN_LGAMMA_R:
14476 case BUILT_IN_LGAMMAF_R:
14477 case BUILT_IN_LGAMMAL_R:
14478 return ".C. Ot";
14479 case BUILT_IN_FREXP:
14480 case BUILT_IN_FREXPF:
14481 case BUILT_IN_FREXPL:
14482 case BUILT_IN_MODF:
14483 case BUILT_IN_MODFF:
14484 case BUILT_IN_MODFL:
14485 return ".c. Ot";
14486 case BUILT_IN_REMQUO:
14487 case BUILT_IN_REMQUOF:
14488 case BUILT_IN_REMQUOL:
14489 return ".c. . Ot";
14490 case BUILT_IN_SINCOS:
14491 case BUILT_IN_SINCOSF:
14492 case BUILT_IN_SINCOSL:
14493 return ".c. OtOt";
14494 case BUILT_IN_MEMSET:
14495 case BUILT_IN_MEMSET_CHK:
14496 case BUILT_IN_TM_MEMSET:
14497 return "1cO3";
14498 CASE_BUILT_IN_TM_STORE (1):
14499 CASE_BUILT_IN_TM_STORE (2):
14500 CASE_BUILT_IN_TM_STORE (4):
14501 CASE_BUILT_IN_TM_STORE (8):
14502 CASE_BUILT_IN_TM_STORE (FLOAT):
14503 CASE_BUILT_IN_TM_STORE (DOUBLE):
14504 CASE_BUILT_IN_TM_STORE (LDOUBLE):
14505 CASE_BUILT_IN_TM_STORE (M64):
14506 CASE_BUILT_IN_TM_STORE (M128):
14507 CASE_BUILT_IN_TM_STORE (M256):
14508 return ".cO ";
14509 case BUILT_IN_STACK_SAVE:
14510 return ".c";
14511 case BUILT_IN_ASSUME_ALIGNED:
14512 return "1cX ";
14513 /* But posix_memalign stores a pointer into the memory pointed to
14514 by its first argument. */
14515 case BUILT_IN_POSIX_MEMALIGN:
14516 return ".cOt";
14517
14518 default:
14519 return "";
14520 }
14521 }