builtins: Fix up two bugs in access_ref::inform_access [PR98721]
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82
83 struct target_builtins default_target_builtins;
84 #if SWITCHABLE_TARGET
85 struct target_builtins *this_target_builtins = &default_target_builtins;
86 #endif
87
88 /* Define the names of the builtin function types and codes. */
89 const char *const built_in_class_names[BUILT_IN_LAST]
90 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
91
92 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
93 const char * built_in_names[(int) END_BUILTINS] =
94 {
95 #include "builtins.def"
96 };
97
98 /* Set up an array of builtin_info_type, making sure each element's decl is
99 initialized to NULL_TREE. */
100 builtin_info_type builtin_info[(int)END_BUILTINS];
101
102 /* Non-zero if __builtin_constant_p should be folded right away. */
103 bool force_folding_builtin_constant_p;
104
105 static int target_char_cast (tree, char *);
106 static rtx get_memory_rtx (tree, tree);
107 static int apply_args_size (void);
108 static int apply_result_size (void);
109 static rtx result_vector (int, rtx);
110 static void expand_builtin_prefetch (tree);
111 static rtx expand_builtin_apply_args (void);
112 static rtx expand_builtin_apply_args_1 (void);
113 static rtx expand_builtin_apply (rtx, rtx, rtx);
114 static void expand_builtin_return (rtx);
115 static enum type_class type_to_class (tree);
116 static rtx expand_builtin_classify_type (tree);
117 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
118 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
119 static rtx expand_builtin_interclass_mathfn (tree, rtx);
120 static rtx expand_builtin_sincos (tree);
121 static rtx expand_builtin_cexpi (tree, rtx);
122 static rtx expand_builtin_int_roundingfn (tree, rtx);
123 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
124 static rtx expand_builtin_next_arg (void);
125 static rtx expand_builtin_va_start (tree);
126 static rtx expand_builtin_va_end (tree);
127 static rtx expand_builtin_va_copy (tree);
128 static rtx inline_expand_builtin_bytecmp (tree, rtx);
129 static rtx expand_builtin_strcmp (tree, rtx);
130 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
131 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
132 static rtx expand_builtin_memchr (tree, rtx);
133 static rtx expand_builtin_memcpy (tree, rtx);
134 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
135 rtx target, tree exp,
136 memop_ret retmode,
137 bool might_overlap);
138 static rtx expand_builtin_memmove (tree, rtx);
139 static rtx expand_builtin_mempcpy (tree, rtx);
140 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
141 static rtx expand_builtin_strcat (tree);
142 static rtx expand_builtin_strcpy (tree, rtx);
143 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
144 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
145 static rtx expand_builtin_stpncpy (tree, rtx);
146 static rtx expand_builtin_strncat (tree, rtx);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
149 static rtx expand_builtin_memset (tree, rtx, machine_mode);
150 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
151 static rtx expand_builtin_bzero (tree);
152 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
153 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
154 static rtx expand_builtin_alloca (tree);
155 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
156 static rtx expand_builtin_frame_address (tree, tree);
157 static tree stabilize_va_list_loc (location_t, tree, int);
158 static rtx expand_builtin_expect (tree, rtx);
159 static rtx expand_builtin_expect_with_probability (tree, rtx);
160 static tree fold_builtin_constant_p (tree);
161 static tree fold_builtin_classify_type (tree);
162 static tree fold_builtin_strlen (location_t, tree, tree, tree);
163 static tree fold_builtin_inf (location_t, tree, int);
164 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
165 static bool validate_arg (const_tree, enum tree_code code);
166 static rtx expand_builtin_fabs (tree, rtx, rtx);
167 static rtx expand_builtin_signbit (tree, rtx);
168 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_varargs (location_t, tree, tree*, int);
177
178 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179 static tree fold_builtin_strspn (location_t, tree, tree, tree);
180 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
181
182 static rtx expand_builtin_object_size (tree);
183 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 enum built_in_function);
185 static void maybe_emit_chk_warning (tree, enum built_in_function);
186 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187 static tree fold_builtin_object_size (tree, tree);
188 static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
189 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
190 pointer_query *);
191
192 unsigned HOST_WIDE_INT target_newline;
193 unsigned HOST_WIDE_INT target_percent;
194 static unsigned HOST_WIDE_INT target_c;
195 static unsigned HOST_WIDE_INT target_s;
196 char target_percent_c[3];
197 char target_percent_s[3];
198 char target_percent_s_newline[4];
199 static tree do_mpfr_remquo (tree, tree, tree);
200 static tree do_mpfr_lgamma_r (tree, tree, tree);
201 static void expand_builtin_sync_synchronize (void);
202
203 access_ref::access_ref (tree bound /* = NULL_TREE */,
204 bool minaccess /* = false */)
205 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
206 base0 (true), parmarray ()
207 {
208 /* Set to valid. */
209 offrng[0] = offrng[1] = 0;
210 /* Invalidate. */
211 sizrng[0] = sizrng[1] = -1;
212
213 /* Set the default bounds of the access and adjust below. */
214 bndrng[0] = minaccess ? 1 : 0;
215 bndrng[1] = HOST_WIDE_INT_M1U;
216
217 /* When BOUND is nonnull and a range can be extracted from it,
218 set the bounds of the access to reflect both it and MINACCESS.
219 BNDRNG[0] is the size of the minimum access. */
220 tree rng[2];
221 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
222 {
223 bndrng[0] = wi::to_offset (rng[0]);
224 bndrng[1] = wi::to_offset (rng[1]);
225 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
226 }
227 }
228
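/* Illustrative sketch (hypothetical caller, values chosen for the
   example): for a bound such as the N argument of a strnlen call,

     access_ref ref (bound, true);    // true: minimum access is 1 byte

   the constructor above copies the upper bound of BOUND's range into
   BNDRNG[1] and then clamps BNDRNG[0] down to 1 (or 0 when MINACCESS
   is false), so a constant bound of 5 yields bndrng = [1, 5].  */
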
229 /* Return the PHI node REF refers to or null if it doesn't refer to one. */
230
231 gphi *
232 access_ref::phi () const
233 {
234 if (!ref || TREE_CODE (ref) != SSA_NAME)
235 return NULL;
236
237 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
238 if (gimple_code (def_stmt) != GIMPLE_PHI)
239 return NULL;
240
241 return as_a <gphi *> (def_stmt);
242 }
243
244 /* Determine and return the largest object to which *THIS refers. If *THIS
245 refers to a PHI and PREF is nonnull, fill *PREF with the details
246 of the object determined by compute_objsize(ARG, OSTYPE) for each
247 PHI argument ARG. */
248
249 tree
250 access_ref::get_ref (vec<access_ref> *all_refs,
251 access_ref *pref /* = NULL */,
252 int ostype /* = 1 */,
253 ssa_name_limit_t *psnlim /* = NULL */,
254 pointer_query *qry /* = NULL */) const
255 {
256 gphi *phi_stmt = this->phi ();
257 if (!phi_stmt)
258 return ref;
259
260 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
261 cause unbounded recursion. */
262 ssa_name_limit_t snlim_buf;
263 if (!psnlim)
264 psnlim = &snlim_buf;
265
266 if (!psnlim->visit_phi (ref))
267 return NULL_TREE;
268
269 /* Reflects the range of offsets when all the PHI arguments refer to the same
270 object (i.e., have the same REF). */
271 access_ref same_ref;
272 /* The conservative result of the PHI reflecting the offset and size
273 of the largest PHI argument, regardless of whether or not they all
274 refer to the same object. */
275 pointer_query empty_qry;
276 if (!qry)
277 qry = &empty_qry;
278
279 access_ref phi_ref;
280 if (pref)
281 {
282 phi_ref = *pref;
283 same_ref = *pref;
284 }
285
286 /* Set if any argument is a function array (or VLA) parameter not
287 declared [static]. */
288 bool parmarray = false;
289 /* The size of the smallest object referenced by the PHI arguments. */
290 offset_int minsize = 0;
291 const offset_int maxobjsize = wi::to_offset (max_object_size ());
292 /* The offset of the PHI, not reflecting those of its arguments. */
293 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
294
295 const unsigned nargs = gimple_phi_num_args (phi_stmt);
296 for (unsigned i = 0; i < nargs; ++i)
297 {
298 access_ref phi_arg_ref;
299 tree arg = gimple_phi_arg_def (phi_stmt, i);
300 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
301 || phi_arg_ref.sizrng[0] < 0)
302 /* A PHI with all null pointer arguments. */
303 return NULL_TREE;
304
305 /* Add PREF's offset to that of the argument. */
306 phi_arg_ref.add_offset (orng[0], orng[1]);
307 if (TREE_CODE (arg) == SSA_NAME)
308 qry->put_ref (arg, phi_arg_ref);
309
310 if (all_refs)
311 all_refs->safe_push (phi_arg_ref);
312
313 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
314 || phi_arg_ref.sizrng[1] != maxobjsize);
315
316 parmarray |= phi_arg_ref.parmarray;
317
318 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
319
320 if (phi_ref.sizrng[0] < 0)
321 {
322 if (!nullp)
323 same_ref = phi_arg_ref;
324 phi_ref = phi_arg_ref;
325 if (arg_known_size)
326 minsize = phi_arg_ref.sizrng[0];
327 continue;
328 }
329
330 const bool phi_known_size = (phi_ref.sizrng[0] != 0
331 || phi_ref.sizrng[1] != maxobjsize);
332
333 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
334 minsize = phi_arg_ref.sizrng[0];
335
336 /* Disregard null pointers in PHIs with two or more arguments.
337 TODO: Handle this better! */
338 if (nullp)
339 continue;
340
341 /* Determine the amount of remaining space in the argument. */
342 offset_int argrem[2];
343 argrem[1] = phi_arg_ref.size_remaining (argrem);
344
345 /* Determine the amount of remaining space computed so far, and
346 if the remaining space in the argument is larger, use it instead. */
347 offset_int phirem[2];
348 phirem[1] = phi_ref.size_remaining (phirem);
349
350 if (phi_arg_ref.ref != same_ref.ref)
351 same_ref.ref = NULL_TREE;
352
353 if (phirem[1] < argrem[1]
354 || (phirem[1] == argrem[1]
355 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
356 /* Use the argument with the most space remaining as the result,
357 or the larger one if the space is equal. */
358 phi_ref = phi_arg_ref;
359
360 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
361 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
362 same_ref.offrng[0] = phi_arg_ref.offrng[0];
363 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
364 same_ref.offrng[1] = phi_arg_ref.offrng[1];
365 }
366
367 if (phi_ref.sizrng[0] < 0)
368 {
369 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
370 (perhaps because they have all been already visited by prior
371 recursive calls). */
372 psnlim->leave_phi (ref);
373 return NULL_TREE;
374 }
375
376 if (!same_ref.ref && same_ref.offrng[0] != 0)
377 /* Clear BASE0 if not all the arguments refer to the same object and
378 if not all their offsets are zero-based. This allows the final
379 PHI offset to be out of bounds for some arguments but not for others
380 (or negative even if all the arguments are BASE0), which is overly
381 permissive. */
382 phi_ref.base0 = false;
383
384 if (same_ref.ref)
385 phi_ref = same_ref;
386 else
387 {
388 /* Replace the lower bound of the largest argument with the size
389 of the smallest argument, and set PARMARRAY if any argument
390 was one. */
391 phi_ref.sizrng[0] = minsize;
392 phi_ref.parmarray = parmarray;
393 }
394
395 /* Avoid changing *THIS. */
396 if (pref && pref != this)
397 *pref = phi_ref;
398
399 psnlim->leave_phi (ref);
400
401 return phi_ref.ref;
402 }
403
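/* Illustrative example (hypothetical GIMPLE, not from a test case):
   with declarations like

     char a[4], b[8];
     ...
     p_1 = PHI <&a[2], &b>

   the two arguments name different objects, so SAME_REF.REF ends up
   null and the loop above keeps the argument with the most space
   remaining (here &b).  The result then refers to B with the lower
   bound of its size replaced by MINSIZE, the size of the smallest
   argument's object, giving a size range of roughly [4, 8].  */
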
404 /* Return the maximum amount of space remaining and, if PMIN is non-null,
405 set *PMIN to the minimum. */
406
407 offset_int
408 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
409 {
410 offset_int minbuf;
411 if (!pmin)
412 pmin = &minbuf;
413
414 /* add_offset() ensures the offset range isn't inverted. */
415 gcc_checking_assert (offrng[0] <= offrng[1]);
416
417 if (base0)
418 {
419 /* The offset into the referenced object is zero-based (i.e., it's
420 not referenced by a pointer into the middle of some unknown object). */
421 if (offrng[0] < 0 && offrng[1] < 0)
422 {
423 /* If the offset is negative the remaining size is zero. */
424 *pmin = 0;
425 return 0;
426 }
427
428 if (sizrng[1] <= offrng[0])
429 {
430 /* If the starting offset is greater than or equal to the upper
431 bound on the size of the object, the space remaining is zero.
432 As a special case, if it's equal, set *PMIN to -1 to let
433 the caller know the offset is valid and just past the end. */
434 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
435 return 0;
436 }
437
438 /* Otherwise return the size minus the lower bound of the offset. */
439 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
440
441 *pmin = sizrng[0] - or0;
442 return sizrng[1] - or0;
443 }
444
445 /* The offset to the referenced object isn't zero-based (i.e., it may
446 refer to a byte other than the first). The size of such an object
447 is constrained only by the size of the address space (the result
448 of max_object_size()). */
449 if (sizrng[1] <= offrng[0])
450 {
451 *pmin = 0;
452 return 0;
453 }
454
455 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
456
457 *pmin = sizrng[0] - or0;
458 return sizrng[1] - or0;
459 }
460
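/* Worked example (values chosen for illustration): for a BASE0
   reference with sizrng = [4, 8] and offrng = [2, 6],

     offset_int minrem;
     offset_int maxrem = ref.size_remaining (&minrem);

   yields maxrem = 6 and minrem = 2, i.e. the size bounds less the
   lower offset bound.  When OFFRNG[0] equals SIZRNG[1] the result is
   zero with *PMIN set to -1 to flag a valid just-past-the-end
   offset.  */
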
461 /* Add the range [MIN, MAX] to the offset range. For known objects (with
462 zero-based offsets) at least one of whose offset's bounds is in range,
463 constrain the other (or both) to the bounds of the object (i.e., zero
464 and the upper bound of its size). This improves the quality of
465 diagnostics. */
466
467 void access_ref::add_offset (const offset_int &min, const offset_int &max)
468 {
469 if (min <= max)
470 {
471 /* To add an ordinary range just add it to the bounds. */
472 offrng[0] += min;
473 offrng[1] += max;
474 }
475 else if (!base0)
476 {
477 /* To add an inverted range to an offset to an unknown object
478 expand it to the maximum. */
479 add_max_offset ();
480 return;
481 }
482 else
483 {
484 /* To add an inverted range to an offset to a known object set
485 the upper bound to the maximum representable offset value
486 (which may be greater than MAX_OBJECT_SIZE).
487 The lower bound is either the sum of the current offset and
488 MIN when abs(MAX) is greater than the former, or zero otherwise.
489 Zero because then the inverted range includes the negative of
490 the lower bound. */
491 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
492 offrng[1] = maxoff;
493
494 if (max >= 0)
495 {
496 offrng[0] = 0;
497 return;
498 }
499
500 offset_int absmax = wi::abs (max);
501 if (offrng[0] < absmax)
502 {
503 offrng[0] += min;
504 /* Cap the lower bound at the upper (set to MAXOFF above)
505 to avoid inadvertently recreating an inverted range. */
506 if (offrng[1] < offrng[0])
507 offrng[0] = offrng[1];
508 }
509 else
510 offrng[0] = 0;
511 }
512
513 if (!base0)
514 return;
515
516 /* When referencing a known object check to see if the offset computed
517 so far is in bounds... */
518 offset_int remrng[2];
519 remrng[1] = size_remaining (remrng);
520 if (remrng[1] > 0 || remrng[0] < 0)
521 {
522 /* ...if so, constrain it so that neither bound exceeds the size of
523 the object. Out of bounds offsets are left unchanged, and, for
524 better or worse, become in bounds later. They should be detected
525 and diagnosed at the point they first become invalid by
526 -Warray-bounds. */
527 if (offrng[0] < 0)
528 offrng[0] = 0;
529 if (offrng[1] > sizrng[1])
530 offrng[1] = sizrng[1];
531 }
532 }
533
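/* Worked example (illustrative values): for a BASE0 reference with
   sizrng = [8, 8] and offrng = [0, 0], calling

     ref.add_offset (4, 12);

   first sets offrng to [4, 12] and then, because the object is known
   and part of the range is still in bounds, clamps the upper bound to
   the object size, leaving offrng = [4, 8].  */
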
534 /* Set a bit for the PHI in VISITED and return true if it wasn't
535 already set. */
536
537 bool
538 ssa_name_limit_t::visit_phi (tree ssa_name)
539 {
540 if (!visited)
541 visited = BITMAP_ALLOC (NULL);
542
543 /* Return false if SSA_NAME has already been visited. */
544 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
545 }
546
547 /* Clear a bit for the PHI in VISITED. */
548
549 void
550 ssa_name_limit_t::leave_phi (tree ssa_name)
551 {
552 /* Clear the bit so SSA_NAME can be visited again later. */
553 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
554 }
555
556 /* Return false if the SSA_NAME chain length counter has reached
557 the limit, otherwise decrement the counter and return true. */
558
559 bool
560 ssa_name_limit_t::next ()
561 {
562 /* Return false to let the caller avoid recursing beyond
563 the specified limit. */
564 if (ssa_def_max == 0)
565 return false;
566
567 --ssa_def_max;
568 return true;
569 }
570
571 /* If the SSA_NAME has already been "seen" return a positive value.
572 Otherwise add it to VISITED. If the SSA_NAME limit has been
573 reached, return a negative value. Otherwise return zero. */
574
575 int
576 ssa_name_limit_t::next_phi (tree ssa_name)
577 {
578 {
579 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
580 /* Return a positive value if the PHI has already been visited. */
581 if (gimple_code (def_stmt) == GIMPLE_PHI
582 && !visit_phi (ssa_name))
583 return 1;
584 }
585
586 /* Return a negative value to let caller avoid recursing beyond
587 the specified limit. */
588 if (ssa_def_max == 0)
589 return -1;
590
591 --ssa_def_max;
592
593 return 0;
594 }
595
596 ssa_name_limit_t::~ssa_name_limit_t ()
597 {
598 if (visited)
599 BITMAP_FREE (visited);
600 }
601
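/* Sketch of the visit/leave protocol the members above implement
   (hypothetical caller, mirroring access_ref::get_ref):

     if (!snlim.visit_phi (ssa_name))
       return NULL_TREE;           // PHI already being visited: stop
     ... recurse into the PHI arguments ...
     snlim.leave_phi (ssa_name);   // allow it to be visited again later

   next () and next_phi () additionally bound the total length of the
   SSA_NAME chains the traversal is willing to follow.  */
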
602 /* Default ctor. Initialize object with pointers to the range_query
603 and cache_type instances to use or null. */
604
605 pointer_query::pointer_query (range_query *qry /* = NULL */,
606 cache_type *cache /* = NULL */)
607 : rvals (qry), var_cache (cache), hits (), misses (),
608 failures (), depth (), max_depth ()
609 {
610 /* No op. */
611 }
612
613 /* Return a pointer to the cached access_ref instance for the SSA_NAME
614 PTR if it's there or null otherwise. */
615
616 const access_ref *
617 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
618 {
619 if (!var_cache)
620 {
621 ++misses;
622 return NULL;
623 }
624
625 unsigned version = SSA_NAME_VERSION (ptr);
626 unsigned idx = version << 1 | (ostype & 1);
627 if (var_cache->indices.length () <= idx)
628 {
629 ++misses;
630 return NULL;
631 }
632
633 unsigned cache_idx = var_cache->indices[idx];
634 if (var_cache->access_refs.length () <= cache_idx)
635 {
636 ++misses;
637 return NULL;
638 }
639
640 access_ref &cache_ref = var_cache->access_refs[cache_idx];
641 if (cache_ref.ref)
642 {
643 ++hits;
644 return &cache_ref;
645 }
646
647 ++misses;
648 return NULL;
649 }
650
651 /* Retrieve the access_ref instance for a variable from the cache if it's
652 there or compute it and insert it into the cache if it's nonnull. */
653
654 bool
655 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
656 {
657 const unsigned version
658 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
659
660 if (var_cache && version)
661 {
662 unsigned idx = version << 1 | (ostype & 1);
663 if (idx < var_cache->indices.length ())
664 {
665 unsigned cache_idx = var_cache->indices[idx] - 1;
666 if (cache_idx < var_cache->access_refs.length ()
667 && var_cache->access_refs[cache_idx].ref)
668 {
669 ++hits;
670 *pref = var_cache->access_refs[cache_idx];
671 return true;
672 }
673 }
674
675 ++misses;
676 }
677
678 if (!compute_objsize (ptr, ostype, pref, this))
679 {
680 ++failures;
681 return false;
682 }
683
684 return true;
685 }
686
687 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
688 nonnull. */
689
690 void
691 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
692 {
693 /* Only add populated/valid entries. */
694 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
695 return;
696
697 /* Add REF to the two-level cache. */
698 unsigned version = SSA_NAME_VERSION (ptr);
699 unsigned idx = version << 1 | (ostype & 1);
700
701 /* Grow INDICES if necessary. An index is valid if it's nonzero.
702 Its value minus one is the index into ACCESS_REFS. Not all
703 entries are valid. */
704 if (var_cache->indices.length () <= idx)
705 var_cache->indices.safe_grow_cleared (idx + 1);
706
707 if (!var_cache->indices[idx])
708 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
709
710 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
711 REF member is nonnull. All entries except for the last two
712 are valid. Once nonnull, the REF value must stay unchanged. */
713 unsigned cache_idx = var_cache->indices[idx];
714 if (var_cache->access_refs.length () <= cache_idx)
715 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
716
717 access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
718 if (cache_ref.ref)
719 {
720 gcc_checking_assert (cache_ref.ref == ref.ref);
721 return;
722 }
723
724 cache_ref = ref;
725 }
726
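/* Worked example of the two-level cache used above (illustrative
   values, following the "value minus one" scheme described in the
   comments): an SSA_NAME with version 7 cached for OSTYPE 1 uses
   idx = (7 << 1) | 1 = 15.  A value of 3 stored in INDICES[15] means
   the corresponding access_ref lives in ACCESS_REFS[2]; a value of
   zero always means "no entry".  */
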
727 /* Flush the cache if it's nonnull. */
728
729 void
730 pointer_query::flush_cache ()
731 {
732 if (!var_cache)
733 return;
734 var_cache->indices.release ();
735 var_cache->access_refs.release ();
736 }
737
738 /* Return true if NAME starts with __builtin_ or __sync_. */
739
740 static bool
741 is_builtin_name (const char *name)
742 {
743 if (strncmp (name, "__builtin_", 10) == 0)
744 return true;
745 if (strncmp (name, "__sync_", 7) == 0)
746 return true;
747 if (strncmp (name, "__atomic_", 9) == 0)
748 return true;
749 return false;
750 }
751
752 /* Return true if NODE should be considered for inline expansion regardless
753 of the optimization level. This means whenever a function is invoked with
754 its "internal" name, which normally contains the prefix "__builtin". */
755
756 bool
757 called_as_built_in (tree node)
758 {
759 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
760 we want the name used to call the function, not the name it
761 will have. */
762 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
763 return is_builtin_name (name);
764 }
765
766 /* Compute values M and N such that M divides (address of EXP - N) and such
767 that N < M. If these numbers can be determined, store M in alignp and N in
768 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
769 *alignp and any bit-offset to *bitposp.
770
771 Note that the address (and thus the alignment) computed here is based
772 on the address to which a symbol resolves, whereas DECL_ALIGN is based
773 on the address at which an object is actually located. These two
774 addresses are not always the same. For example, on ARM targets,
775 the address &foo of a Thumb function foo() has the lowest bit set,
776 whereas foo() itself starts on an even address.
777
778 If ADDR_P is true we are taking the address of the memory reference EXP
779 and thus cannot rely on the access taking place. */
780
781 static bool
782 get_object_alignment_2 (tree exp, unsigned int *alignp,
783 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
784 {
785 poly_int64 bitsize, bitpos;
786 tree offset;
787 machine_mode mode;
788 int unsignedp, reversep, volatilep;
789 unsigned int align = BITS_PER_UNIT;
790 bool known_alignment = false;
791
792 /* Get the innermost object and the constant (bitpos) and possibly
793 variable (offset) offset of the access. */
794 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
795 &unsignedp, &reversep, &volatilep);
796
797 /* Extract alignment information from the innermost object and
798 possibly adjust bitpos and offset. */
799 if (TREE_CODE (exp) == FUNCTION_DECL)
800 {
801 /* Function addresses can encode extra information besides their
802 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
803 allows the low bit to be used as a virtual bit, we know
804 that the address itself must be at least 2-byte aligned. */
805 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
806 align = 2 * BITS_PER_UNIT;
807 }
808 else if (TREE_CODE (exp) == LABEL_DECL)
809 ;
810 else if (TREE_CODE (exp) == CONST_DECL)
811 {
812 /* The alignment of a CONST_DECL is determined by its initializer. */
813 exp = DECL_INITIAL (exp);
814 align = TYPE_ALIGN (TREE_TYPE (exp));
815 if (CONSTANT_CLASS_P (exp))
816 align = targetm.constant_alignment (exp, align);
817
818 known_alignment = true;
819 }
820 else if (DECL_P (exp))
821 {
822 align = DECL_ALIGN (exp);
823 known_alignment = true;
824 }
825 else if (TREE_CODE (exp) == INDIRECT_REF
826 || TREE_CODE (exp) == MEM_REF
827 || TREE_CODE (exp) == TARGET_MEM_REF)
828 {
829 tree addr = TREE_OPERAND (exp, 0);
830 unsigned ptr_align;
831 unsigned HOST_WIDE_INT ptr_bitpos;
832 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
833
834 /* If the address is explicitly aligned, handle that. */
835 if (TREE_CODE (addr) == BIT_AND_EXPR
836 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
837 {
838 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
839 ptr_bitmask *= BITS_PER_UNIT;
840 align = least_bit_hwi (ptr_bitmask);
841 addr = TREE_OPERAND (addr, 0);
842 }
843
844 known_alignment
845 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
846 align = MAX (ptr_align, align);
847
848 /* Re-apply explicit alignment to the bitpos. */
849 ptr_bitpos &= ptr_bitmask;
850
851 /* The alignment of the pointer operand in a TARGET_MEM_REF
852 has to take the variable offset parts into account. */
853 if (TREE_CODE (exp) == TARGET_MEM_REF)
854 {
855 if (TMR_INDEX (exp))
856 {
857 unsigned HOST_WIDE_INT step = 1;
858 if (TMR_STEP (exp))
859 step = TREE_INT_CST_LOW (TMR_STEP (exp));
860 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
861 }
862 if (TMR_INDEX2 (exp))
863 align = BITS_PER_UNIT;
864 known_alignment = false;
865 }
866
867 /* When EXP is an actual memory reference then we can use
868 TYPE_ALIGN of a pointer indirection to derive alignment.
869 Do so only if get_pointer_alignment_1 did not reveal absolute
870 alignment knowledge and if using that alignment would
871 improve the situation. */
872 unsigned int talign;
873 if (!addr_p && !known_alignment
874 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
875 && talign > align)
876 align = talign;
877 else
878 {
879 /* Else adjust bitpos accordingly. */
880 bitpos += ptr_bitpos;
881 if (TREE_CODE (exp) == MEM_REF
882 || TREE_CODE (exp) == TARGET_MEM_REF)
883 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
884 }
885 }
886 else if (TREE_CODE (exp) == STRING_CST)
887 {
888 /* STRING_CSTs are the only constant objects we allow to be not
889 wrapped inside a CONST_DECL. */
890 align = TYPE_ALIGN (TREE_TYPE (exp));
891 if (CONSTANT_CLASS_P (exp))
892 align = targetm.constant_alignment (exp, align);
893
894 known_alignment = true;
895 }
896
897 /* If there is a non-constant offset part extract the maximum
898 alignment that can prevail. */
899 if (offset)
900 {
901 unsigned int trailing_zeros = tree_ctz (offset);
902 if (trailing_zeros < HOST_BITS_PER_INT)
903 {
904 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
905 if (inner)
906 align = MIN (align, inner);
907 }
908 }
909
910 /* Account for the alignment of runtime coefficients, so that the constant
911 bitpos is guaranteed to be accurate. */
912 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
913 if (alt_align != 0 && alt_align < align)
914 {
915 align = alt_align;
916 known_alignment = false;
917 }
918
919 *alignp = align;
920 *bitposp = bitpos.coeffs[0] & (align - 1);
921 return known_alignment;
922 }
923
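/* Worked example of the M/N contract described above (illustrative):
   for a reference known to be 2 bytes past an 8-byte-aligned address,
   the function stores M = 64 (bits) in *ALIGNP and N = 16 in *BITPOSP,
   i.e. N < M and the address minus N is divisible by M.  */
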
924 /* For a memory reference expression EXP compute values M and N such that M
925 divides (&EXP - N) and such that N < M. If these numbers can be determined,
926 store M in alignp and N in *BITPOSP and return true. Otherwise return false
927 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
928
929 bool
930 get_object_alignment_1 (tree exp, unsigned int *alignp,
931 unsigned HOST_WIDE_INT *bitposp)
932 {
933 return get_object_alignment_2 (exp, alignp, bitposp, false);
934 }
935
936 /* Return the alignment in bits of EXP, an object. */
937
938 unsigned int
939 get_object_alignment (tree exp)
940 {
941 unsigned HOST_WIDE_INT bitpos = 0;
942 unsigned int align;
943
944 get_object_alignment_1 (exp, &align, &bitpos);
945
946 /* align and bitpos now specify known low bits of the pointer.
947 ptr & (align - 1) == bitpos. */
948
949 if (bitpos != 0)
950 align = least_bit_hwi (bitpos);
951 return align;
952 }
953
954 /* For a pointer valued expression EXP compute values M and N such that M
955 divides (EXP - N) and such that N < M. If these numbers can be determined,
956 store M in alignp and N in *BITPOSP and return true. Return false if
957 the results are just a conservative approximation.
958
959 If EXP is not a pointer, false is returned too. */
960
961 bool
962 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
963 unsigned HOST_WIDE_INT *bitposp)
964 {
965 STRIP_NOPS (exp);
966
967 if (TREE_CODE (exp) == ADDR_EXPR)
968 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
969 alignp, bitposp, true);
970 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
971 {
972 unsigned int align;
973 unsigned HOST_WIDE_INT bitpos;
974 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
975 &align, &bitpos);
976 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
977 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
978 else
979 {
980 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
981 if (trailing_zeros < HOST_BITS_PER_INT)
982 {
983 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
984 if (inner)
985 align = MIN (align, inner);
986 }
987 }
988 *alignp = align;
989 *bitposp = bitpos & (align - 1);
990 return res;
991 }
992 else if (TREE_CODE (exp) == SSA_NAME
993 && POINTER_TYPE_P (TREE_TYPE (exp)))
994 {
995 unsigned int ptr_align, ptr_misalign;
996 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
997
998 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
999 {
1000 *bitposp = ptr_misalign * BITS_PER_UNIT;
1001 *alignp = ptr_align * BITS_PER_UNIT;
1002 /* Make sure to return a sensible alignment when the multiplication
1003 by BITS_PER_UNIT overflowed. */
1004 if (*alignp == 0)
1005 *alignp = 1u << (HOST_BITS_PER_INT - 1);
1006 /* We cannot really tell whether this result is an approximation. */
1007 return false;
1008 }
1009 else
1010 {
1011 *bitposp = 0;
1012 *alignp = BITS_PER_UNIT;
1013 return false;
1014 }
1015 }
1016 else if (TREE_CODE (exp) == INTEGER_CST)
1017 {
1018 *alignp = BIGGEST_ALIGNMENT;
1019 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
1020 & (BIGGEST_ALIGNMENT - 1));
1021 return true;
1022 }
1023
1024 *bitposp = 0;
1025 *alignp = BITS_PER_UNIT;
1026 return false;
1027 }
1028
1029 /* Return the alignment in bits of EXP, a pointer valued expression.
1030 The alignment returned is, by default, the alignment of the thing that
1031 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
1032
1033 Otherwise, look at the expression to see if we can do better, i.e., if the
1034 expression is actually pointing at an object whose alignment is tighter. */
1035
1036 unsigned int
1037 get_pointer_alignment (tree exp)
1038 {
1039 unsigned HOST_WIDE_INT bitpos = 0;
1040 unsigned int align;
1041
1042 get_pointer_alignment_1 (exp, &align, &bitpos);
1043
1044 /* align and bitpos now specify known low bits of the pointer.
1045 ptr & (align - 1) == bitpos. */
1046
1047 if (bitpos != 0)
1048 align = least_bit_hwi (bitpos);
1049
1050 return align;
1051 }
1052
1053 /* Return the number of leading non-zero elements in the sequence
1054 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
1055 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
1056
1057 unsigned
1058 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
1059 {
1060 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1061
1062 unsigned n;
1063
1064 if (eltsize == 1)
1065 {
1066 /* Optimize the common case of plain char. */
1067 for (n = 0; n < maxelts; n++)
1068 {
1069 const char *elt = (const char*) ptr + n;
1070 if (!*elt)
1071 break;
1072 }
1073 }
1074 else
1075 {
1076 for (n = 0; n < maxelts; n++)
1077 {
1078 const char *elt = (const char*) ptr + n * eltsize;
1079 if (!memcmp (elt, "\0\0\0\0", eltsize))
1080 break;
1081 }
1082 }
1083 return n;
1084 }
1085
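/* Illustrative calls (byte values chosen for the example):

     string_length ("ab\0cd", 1, 5)        returns 2
     string_length ("a\0b\0\0\0", 2, 3)    returns 2

   the second treating the buffer as three 2-byte elements, the last
   of which is the terminating NUL.  */
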
1086 /* For a call EXPR at LOC to a function FNAME that expects a string
1087 in the argument ARG, issue a diagnostic due to it being a called
1088 with an argument that is a character array with no terminating
1089 NUL. SIZE is the EXACT size of the array, and BNDRNG the number
1090 of characters in which the NUL is expected. Either EXPR or FNAME
1092 may be null but not both. SIZE may be null when BNDRNG is null. */
1092
1093 void
1094 warn_string_no_nul (location_t loc, tree expr, const char *fname,
1095 tree arg, tree decl, tree size /* = NULL_TREE */,
1096 bool exact /* = false */,
1097 const wide_int bndrng[2] /* = NULL */)
1098 {
1099 if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
1100 return;
1101
1102 loc = expansion_point_location_if_in_system_header (loc);
1103 bool warned;
1104
1105 /* Format the bound range as a string to keep the number of messages
1106 from exploding. */
1107 char bndstr[80];
1108 *bndstr = 0;
1109 if (bndrng)
1110 {
1111 if (bndrng[0] == bndrng[1])
1112 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
1113 else
1114 sprintf (bndstr, "[%llu, %llu]",
1115 (unsigned long long) bndrng[0].to_uhwi (),
1116 (unsigned long long) bndrng[1].to_uhwi ());
1117 }
1118
1119 const tree maxobjsize = max_object_size ();
1120 const wide_int maxsiz = wi::to_wide (maxobjsize);
1121 if (expr)
1122 {
1123 tree func = get_callee_fndecl (expr);
1124 if (bndrng)
1125 {
1126 if (wi::ltu_p (maxsiz, bndrng[0]))
1127 warned = warning_at (loc, OPT_Wstringop_overread,
1128 "%K%qD specified bound %s exceeds "
1129 "maximum object size %E",
1130 expr, func, bndstr, maxobjsize);
1131 else
1132 {
1133 bool maybe = wi::to_wide (size) == bndrng[0];
1134 warned = warning_at (loc, OPT_Wstringop_overread,
1135 exact
1136 ? G_("%K%qD specified bound %s exceeds "
1137 "the size %E of unterminated array")
1138 : (maybe
1139 ? G_("%K%qD specified bound %s may "
1140 "exceed the size of at most %E "
1141 "of unterminated array")
1142 : G_("%K%qD specified bound %s exceeds "
1143 "the size of at most %E "
1144 "of unterminated array")),
1145 expr, func, bndstr, size);
1146 }
1147 }
1148 else
1149 warned = warning_at (loc, OPT_Wstringop_overread,
1150 "%K%qD argument missing terminating nul",
1151 expr, func);
1152 }
1153 else
1154 {
1155 if (bndrng)
1156 {
1157 if (wi::ltu_p (maxsiz, bndrng[0]))
1158 warned = warning_at (loc, OPT_Wstringop_overread,
1159 "%qs specified bound %s exceeds "
1160 "maximum object size %E",
1161 fname, bndstr, maxobjsize);
1162 else
1163 {
1164 bool maybe = wi::to_wide (size) == bndrng[0];
1165 warned = warning_at (loc, OPT_Wstringop_overread,
1166 exact
1167 ? G_("%qs specified bound %s exceeds "
1168 "the size %E of unterminated array")
1169 : (maybe
1170 ? G_("%qs specified bound %s may "
1171 "exceed the size of at most %E "
1172 "of unterminated array")
1173 : G_("%qs specified bound %s exceeds "
1174 "the size of at most %E "
1175 "of unterminated array")),
1176 fname, bndstr, size);
1177 }
1178 }
1179 else
1180 warned = warning_at (loc, OPT_Wstringop_overread,
1181 "%qsargument missing terminating nul",
1182 fname);
1183 }
1184
1185 if (warned)
1186 {
1187 inform (DECL_SOURCE_LOCATION (decl),
1188 "referenced argument declared here");
1189 TREE_NO_WARNING (arg) = 1;
1190 if (expr)
1191 TREE_NO_WARNING (expr) = 1;
1192 }
1193 }
1194
1195 /* For a call EXPR (which may be null) that expects a string argument
1196 SRC, returns false if SRC is a character array with no terminating
1197 NUL. When nonnull, BOUND is the number of characters in which to
1198 expect the terminating NUL. RDONLY is true for read-only accesses
1199 such as strcmp, false for read-write such as strcpy. When EXPR
1200 is nonnull, also issues a warning. */
1201
1202 bool
1203 check_nul_terminated_array (tree expr, tree src,
1204 tree bound /* = NULL_TREE */)
1205 {
1206 /* The constant size of the array SRC points to. The actual size
1207 may be less if EXACT is false, but not more. */
1208 tree size;
1209 /* True if SRC involves a non-constant offset into the array. */
1210 bool exact;
1211 /* The unterminated constant array SRC points to. */
1212 tree nonstr = unterminated_array (src, &size, &exact);
1213 if (!nonstr)
1214 return true;
1215
1216 /* NONSTR refers to the non-nul terminated constant array and SIZE
1217 is the constant size of the array in bytes. EXACT is true when
1218 SIZE is exact. */
1219
1220 wide_int bndrng[2];
1221 if (bound)
1222 {
1223 if (TREE_CODE (bound) == INTEGER_CST)
1224 bndrng[0] = bndrng[1] = wi::to_wide (bound);
1225 else
1226 {
1227 value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
1228 if (rng != VR_RANGE)
1229 return true;
1230 }
1231
1232 if (exact)
1233 {
1234 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
1235 return true;
1236 }
1237 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
1238 return true;
1239 }
1240
1241 if (expr)
1242 warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
1243 size, exact, bound ? bndrng : NULL);
1244
1245 return false;
1246 }
1247
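/* Illustrative user-level example of the kind of code diagnosed here
   (hypothetical source, not a regression test):

     const char a[] = { 'a', 'b', 'c' };   // no terminating NUL
     unsigned n = strnlen (a, 5);          // bound exceeds the array

   Passing A with a bound larger than its size is roughly what makes
   warn_string_no_nul above report it under -Wstringop-overread.  */
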
1248 /* If EXP refers to an unterminated constant character array return
1249 the declaration of the object of which the array is a member or
1250 element and if SIZE is not null, set *SIZE to the size of
1251 the unterminated array and set *EXACT if the size is exact or
1252 clear it otherwise. Otherwise return null. */
1253
1254 tree
1255 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
1256 {
1257 /* C_STRLEN will return NULL and set DECL in the info
1258 structure if EXP references an unterminated array. */
1259 c_strlen_data lendata = { };
1260 tree len = c_strlen (exp, 1, &lendata);
1261 if (len == NULL_TREE && lendata.minlen && lendata.decl)
1262 {
1263 if (size)
1264 {
1265 len = lendata.minlen;
1266 if (lendata.off)
1267 {
1268 /* Constant offsets are already accounted for in LENDATA.MINLEN,
1269 but not in an SSA_NAME + CST expression. */
1270 if (TREE_CODE (lendata.off) == INTEGER_CST)
1271 *exact = true;
1272 else if (TREE_CODE (lendata.off) == PLUS_EXPR
1273 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
1274 {
1275 /* Subtract the offset from the size of the array. */
1276 *exact = false;
1277 tree temp = TREE_OPERAND (lendata.off, 1);
1278 temp = fold_convert (ssizetype, temp);
1279 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
1280 }
1281 else
1282 *exact = false;
1283 }
1284 else
1285 *exact = true;
1286
1287 *size = len;
1288 }
1289 return lendata.decl;
1290 }
1291
1292 return NULL_TREE;
1293 }
1294
1295 /* Compute the length of a null-terminated character string or wide
1296 character string handling character sizes of 1, 2, and 4 bytes.
1297 TREE_STRING_LENGTH is not the right way because it evaluates to
1298 the size of the character array in bytes (as opposed to characters)
1299 and because it can contain a zero byte in the middle.
1300
1301 ONLY_VALUE should be nonzero if the result is not going to be emitted
1302 into the instruction stream and zero if it is going to be expanded.
1303 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
1304 is returned, otherwise NULL, since
1305 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
1306 evaluate the side-effects.
1307
1308 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
1309 accesses. Note that this implies the result is not going to be emitted
1310 into the instruction stream.
1311
1312 Additional information about the string accessed may be recorded
1313 in DATA. For example, if ARG references an unterminated string,
1314 then the declaration will be stored in the DECL field. If the
1315 length of the unterminated string can be determined, it'll be
1316 stored in the LEN field. Note this length could well be different
1317 than what a C strlen call would return.
1318
1319 ELTSIZE is 1 for normal single-byte character strings, and 2 or
1320 4 for wide character strings. ELTSIZE is by default 1.
1321
1322 The value returned is of type `ssizetype'. */
1323
1324 tree
1325 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
1326 {
1327 /* If we were not passed a DATA pointer, then get one to a local
1328 structure. That avoids having to check DATA for NULL before
1329 each time we want to use it. */
1330 c_strlen_data local_strlen_data = { };
1331 if (!data)
1332 data = &local_strlen_data;
1333
1334 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1335
1336 tree src = STRIP_NOPS (arg);
1337 if (TREE_CODE (src) == COND_EXPR
1338 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1339 {
1340 tree len1, len2;
1341
1342 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1343 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
1344 if (tree_int_cst_equal (len1, len2))
1345 return len1;
1346 }
1347
1348 if (TREE_CODE (src) == COMPOUND_EXPR
1349 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1350 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1351
1352 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
1353
1354 /* Offset from the beginning of the string in bytes. */
1355 tree byteoff;
1356 tree memsize;
1357 tree decl;
1358 src = string_constant (src, &byteoff, &memsize, &decl);
1359 if (src == 0)
1360 return NULL_TREE;
1361
1362 /* Determine the size of the string element. */
1363 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
1364 return NULL_TREE;
1365
1366 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
1367 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
1368 in case the latter is less than the size of the array, such as when
1369 SRC refers to a short string literal used to initialize a large array.
1370 In that case, the elements of the array after the terminating NUL are
1371 all NUL. */
1372 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
1373 strelts = strelts / eltsize;
1374
1375 if (!tree_fits_uhwi_p (memsize))
1376 return NULL_TREE;
1377
1378 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1379
1380 /* PTR can point to the byte representation of any string type, including
1381 char* and wchar_t*. */
1382 const char *ptr = TREE_STRING_POINTER (src);
1383
1384 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
1385 {
1386 /* The code below works only for single byte character types. */
1387 if (eltsize != 1)
1388 return NULL_TREE;
1389
1390 /* If the string has an internal NUL character followed by any
1391 non-NUL characters (e.g., "foo\0bar"), we can't compute
1392 the offset to the following NUL if we don't know where to
1393 start searching for it. */
1394 unsigned len = string_length (ptr, eltsize, strelts);
1395
1396 /* Return when an embedded null character is found or none at all.
1397 In the latter case, set the DECL/LEN field in the DATA structure
1398 so that callers may examine them. */
1399 if (len + 1 < strelts)
1400 return NULL_TREE;
1401 else if (len >= maxelts)
1402 {
1403 data->decl = decl;
1404 data->off = byteoff;
1405 data->minlen = ssize_int (len);
1406 return NULL_TREE;
1407 }
1408
1409 /* For empty strings the result should be zero. */
1410 if (len == 0)
1411 return ssize_int (0);
1412
1413 /* We don't know the starting offset, but we do know that the string
1414 has no internal zero bytes. If the offset falls within the bounds
1415 of the string subtract the offset from the length of the string,
1416 and return that. Otherwise the length is zero. Take care to
1417 use SAVE_EXPR in case the OFFSET has side-effects. */
1418 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
1419 : byteoff;
1420 offsave = fold_convert_loc (loc, sizetype, offsave);
1421 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
1422 size_int (len));
1423 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
1424 offsave);
1425 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
1426 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
1427 build_zero_cst (ssizetype));
1428 }
1429
1430 /* Offset from the beginning of the string in elements. */
1431 HOST_WIDE_INT eltoff;
1432
1433 /* We have a known offset into the string. Start searching there for
1434 a null character if we can represent it as a single HOST_WIDE_INT. */
1435 if (byteoff == 0)
1436 eltoff = 0;
1437 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1438 eltoff = -1;
1439 else
1440 eltoff = tree_to_uhwi (byteoff) / eltsize;
1441
1442 /* If the offset is known to be out of bounds, warn, and call strlen at
1443 runtime. */
1444 if (eltoff < 0 || eltoff >= maxelts)
1445 {
1446 /* Suppress multiple warnings for propagated constant strings. */
1447 if (only_value != 2
1448 && !TREE_NO_WARNING (arg)
1449 && warning_at (loc, OPT_Warray_bounds,
1450 "offset %qwi outside bounds of constant string",
1451 eltoff))
1452 {
1453 if (decl)
1454 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
1455 TREE_NO_WARNING (arg) = 1;
1456 }
1457 return NULL_TREE;
1458 }
1459
1460 /* If eltoff is larger than strelts but less than maxelts the
1461 string length is zero, since the excess memory will be zero. */
1462 if (eltoff > strelts)
1463 return ssize_int (0);
1464
1465 /* Use strlen to search for the first zero byte. Since any strings
1466 constructed with build_string will have nulls appended, we win even
1467 if we get handed something like (char[4])"abcd".
1468
1469 Since ELTOFF is our starting index into the string, no further
1470 calculation is needed. */
1471 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
1472 strelts - eltoff);
1473
1474 /* Don't know what to return if there was no zero termination.
1475 Ideally this would turn into a gcc_checking_assert over time.
1476 Set DECL/LEN so callers can examine them. */
1477 if (len >= maxelts - eltoff)
1478 {
1479 data->decl = decl;
1480 data->off = byteoff;
1481 data->minlen = ssize_int (len);
1482 return NULL_TREE;
1483 }
1484
1485 return ssize_int (len);
1486 }
1487
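/* Sketch of a typical caller (hypothetical; compare unterminated_array
   above, which uses the DATA fields the same way):

     c_strlen_data lendata = { };
     tree len = c_strlen (arg, 1, &lendata);
     if (len)
       ... the length is the constant LEN ...
     else if (lendata.decl)
       ... ARG refers to an unterminated array declared by
           LENDATA.DECL, at least LENDATA.MINLEN characters long ...  */
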
1488 /* Return a constant integer corresponding to target reading
1489 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1490 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1491 are assumed to be zero, otherwise it reads as many characters
1492 as needed. */
1493
1494 rtx
1495 c_readstr (const char *str, scalar_int_mode mode,
1496 bool null_terminated_p/*=true*/)
1497 {
1498 HOST_WIDE_INT ch;
1499 unsigned int i, j;
1500 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
1501
1502 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
1503 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
1504 / HOST_BITS_PER_WIDE_INT;
1505
1506 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
1507 for (i = 0; i < len; i++)
1508 tmp[i] = 0;
1509
1510 ch = 1;
1511 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1512 {
1513 j = i;
1514 if (WORDS_BIG_ENDIAN)
1515 j = GET_MODE_SIZE (mode) - i - 1;
1516 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
1517 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
1518 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
1519 j *= BITS_PER_UNIT;
1520
1521 if (ch || !null_terminated_p)
1522 ch = (unsigned char) str[i];
1523 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
1524 }
1525
1526 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1527 return immed_wide_int_const (c, mode);
1528 }
1529
1530 /* Cast a target constant CST to target CHAR and if that value fits into
1531 the host char type, return zero and put that value into the variable
1532 pointed to by P. */
1533
1534 static int
1535 target_char_cast (tree cst, char *p)
1536 {
1537 unsigned HOST_WIDE_INT val, hostval;
1538
1539 if (TREE_CODE (cst) != INTEGER_CST
1540 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1541 return 1;
1542
1543 /* Do not care if it fits or not right here. */
1544 val = TREE_INT_CST_LOW (cst);
1545
1546 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
1547 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
1548
1549 hostval = val;
1550 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
1551 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
1552
1553 if (val != hostval)
1554 return 1;
1555
1556 *p = hostval;
1557 return 0;
1558 }
1559
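/* Typical use (hypothetical caller): convert a character argument of
   a builtin to a host char, giving up when it does not fit:

     char c;
     if (target_char_cast (arg, &c))
       return NULL_RTX;          // not a constant or does not fit
     ... use C as the byte value ...  */
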
1560 /* Similar to save_expr, but assumes that arbitrary code is not executed
1561 in between the multiple evaluations. In particular, we assume that a
1562 non-addressable local variable will not be modified. */
1563
1564 static tree
1565 builtin_save_expr (tree exp)
1566 {
1567 if (TREE_CODE (exp) == SSA_NAME
1568 || (TREE_ADDRESSABLE (exp) == 0
1569 && (TREE_CODE (exp) == PARM_DECL
1570 || (VAR_P (exp) && !TREE_STATIC (exp)))))
1571 return exp;
1572
1573 return save_expr (exp);
1574 }
1575
1576 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1577 times to get the address of either a higher stack frame, or a return
1578 address located within it (depending on FNDECL_CODE). */
1579
1580 static rtx
1581 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1582 {
1583 int i;
1584 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
1585 if (tem == NULL_RTX)
1586 {
1587 /* For a zero count with __builtin_return_address, we don't care what
1588 frame address we return, because target-specific definitions will
1589 override us. Therefore frame pointer elimination is OK, and using
1590 the soft frame pointer is OK.
1591
1592 For a nonzero count, or a zero count with __builtin_frame_address,
1593 we require a stable offset from the current frame pointer to the
1594 previous one, so we must use the hard frame pointer, and
1595 we must disable frame pointer elimination. */
1596 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1597 tem = frame_pointer_rtx;
1598 else
1599 {
1600 tem = hard_frame_pointer_rtx;
1601
1602 /* Tell reload not to eliminate the frame pointer. */
1603 crtl->accesses_prior_frames = 1;
1604 }
1605 }
1606
1607 if (count > 0)
1608 SETUP_FRAME_ADDRESSES ();
1609
1610 /* On the SPARC, the return address is not in the frame, it is in a
1611 register. There is no way to access it off of the current frame
1612 pointer, but it can be accessed off the previous frame pointer by
1613 reading the value from the register window save area. */
1614 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1615 count--;
1616
1617 /* Scan back COUNT frames to the specified frame. */
1618 for (i = 0; i < count; i++)
1619 {
1620 /* Assume the dynamic chain pointer is in the word that the
1621 frame address points to, unless otherwise specified. */
1622 tem = DYNAMIC_CHAIN_ADDRESS (tem);
1623 tem = memory_address (Pmode, tem);
1624 tem = gen_frame_mem (Pmode, tem);
1625 tem = copy_to_reg (tem);
1626 }
1627
1628 /* For __builtin_frame_address, return what we've got. But, on
1629 the SPARC for example, we may have to add a bias. */
1630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1631 return FRAME_ADDR_RTX (tem);
1632
1633 /* For __builtin_return_address, get the return address from that frame. */
1634 #ifdef RETURN_ADDR_RTX
1635 tem = RETURN_ADDR_RTX (count, tem);
1636 #else
1637 tem = memory_address (Pmode,
1638 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1639 tem = gen_frame_mem (Pmode, tem);
1640 #endif
1641 return tem;
1642 }
1643
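/* User-level view of what this expands (illustrative source):

     void *ra = __builtin_return_address (0);   // caller's return address
     void *fp = __builtin_frame_address (1);    // parent frame's address

   A COUNT of zero names the current frame; larger values walk the
   dynamic chain as in the loop above, which is only reliable when the
   intervening frames use a frame pointer.  */
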
1644 /* Alias set used for setjmp buffer. */
1645 static alias_set_type setjmp_alias_set = -1;
1646
1647 /* Construct the leading half of a __builtin_setjmp call. Control will
1648 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1649 exception handling code. */
1650
1651 void
1652 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1653 {
1654 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1655 rtx stack_save;
1656 rtx mem;
1657
1658 if (setjmp_alias_set == -1)
1659 setjmp_alias_set = new_alias_set ();
1660
1661 buf_addr = convert_memory_address (Pmode, buf_addr);
1662
1663 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1664
1665 /* We store the frame pointer and the address of receiver_label in
1666 the buffer and use the rest of it for the stack save area, which
1667 is machine-dependent. */
1668
1669 mem = gen_rtx_MEM (Pmode, buf_addr);
1670 set_mem_alias_set (mem, setjmp_alias_set);
1671 emit_move_insn (mem, hard_frame_pointer_rtx);
1672
1673 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1674 GET_MODE_SIZE (Pmode))),
1675 set_mem_alias_set (mem, setjmp_alias_set);
1676
1677 emit_move_insn (validize_mem (mem),
1678 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1679
1680 stack_save = gen_rtx_MEM (sa_mode,
1681 plus_constant (Pmode, buf_addr,
1682 2 * GET_MODE_SIZE (Pmode)));
1683 set_mem_alias_set (stack_save, setjmp_alias_set);
1684 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1685
1686 /* If there is further processing to do, do it. */
1687 if (targetm.have_builtin_setjmp_setup ())
1688 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1689
1690 /* We have a nonlocal label. */
1691 cfun->has_nonlocal_label = 1;
1692 }
1693
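/* Layout of the __builtin_setjmp buffer as written above, in
   pointer-sized words (the stack save area is target dependent and
   may be wider than one word):

     buf[0]  hard frame pointer
     buf[1]  address of the receiver label
     buf[2]  stack pointer save area (SAVE_NONLOCAL)

   expand_builtin_longjmp and the nonlocal goto code read the words
   back in the same order.  */
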
1694 /* Construct the trailing part of a __builtin_setjmp call. This is
1695 also called directly by the SJLJ exception handling code.
1696 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1697
1698 void
1699 expand_builtin_setjmp_receiver (rtx receiver_label)
1700 {
1701 rtx chain;
1702
1703 /* The frame pointer is needed when control reaches this point, so mark
1704 it as used by this function. */
1705 emit_use (hard_frame_pointer_rtx);
1706
1707 /* Mark the static chain as clobbered here so life information
1708 doesn't get messed up for it. */
1709 chain = rtx_for_static_chain (current_function_decl, true);
1710 if (chain && REG_P (chain))
1711 emit_clobber (chain);
1712
1713 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1714 {
1715 /* If the argument pointer can be eliminated in favor of the
1716 frame pointer, we don't need to restore it. We assume here
1717 that if such an elimination is present, it can always be used.
1718 This is the case on all known machines; if we don't make this
1719 assumption, we do unnecessary saving on many machines. */
1720 size_t i;
1721 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1722
1723 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1724 if (elim_regs[i].from == ARG_POINTER_REGNUM
1725 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1726 break;
1727
1728 if (i == ARRAY_SIZE (elim_regs))
1729 {
1730 /* Now restore our arg pointer from the address at which it
1731 was saved in our stack frame. */
1732 emit_move_insn (crtl->args.internal_arg_pointer,
1733 copy_to_reg (get_arg_pointer_save_area ()));
1734 }
1735 }
1736
1737 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1738 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1739 else if (targetm.have_nonlocal_goto_receiver ())
1740 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1741 else
1742 { /* Nothing */ }
1743
1744 /* We must not allow the code we just generated to be reordered by
1745 scheduling. Specifically, the update of the frame pointer must
1746 happen immediately, not later. */
1747 emit_insn (gen_blockage ());
1748 }
1749
1750 /* __builtin_longjmp is passed a pointer to an array of five words (not
1751 all will be used on all machines). It operates similarly to the C
1752 library function of the same name, but is more efficient. Much of
1753 the code below is copied from the handling of non-local gotos. */
1754
1755 static void
1756 expand_builtin_longjmp (rtx buf_addr, rtx value)
1757 {
1758 rtx fp, lab, stack;
1759 rtx_insn *insn, *last;
1760 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1761
1762 /* DRAP is needed for stack realignment if longjmp is expanded in the
1763 current function. */
1764 if (SUPPORTS_STACK_ALIGNMENT)
1765 crtl->need_drap = true;
1766
1767 if (setjmp_alias_set == -1)
1768 setjmp_alias_set = new_alias_set ();
1769
1770 buf_addr = convert_memory_address (Pmode, buf_addr);
1771
1772 buf_addr = force_reg (Pmode, buf_addr);
1773
1774 /* We require the user to pass a second argument of 1, because
1775 that is what builtin_setjmp will return. */
1776 gcc_assert (value == const1_rtx);
1777
1778 last = get_last_insn ();
1779 if (targetm.have_builtin_longjmp ())
1780 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1781 else
1782 {
1783 fp = gen_rtx_MEM (Pmode, buf_addr);
1784 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1785 GET_MODE_SIZE (Pmode)));
1786
1787 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1788 2 * GET_MODE_SIZE (Pmode)));
1789 set_mem_alias_set (fp, setjmp_alias_set);
1790 set_mem_alias_set (lab, setjmp_alias_set);
1791 set_mem_alias_set (stack, setjmp_alias_set);
1792
1793 /* Pick up FP, label, and SP from the block and jump. This code is
1794 from expand_goto in stmt.c; see there for detailed comments. */
1795 if (targetm.have_nonlocal_goto ())
1796 /* We have to pass a value to the nonlocal_goto pattern that will
1797 get copied into the static_chain pointer, but it does not matter
1798 what that value is, because builtin_setjmp does not use it. */
1799 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1800 else
1801 {
1802 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1803 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1804
1805 lab = copy_to_reg (lab);
1806
1807 /* Restore the frame pointer and stack pointer. We must use a
1808 temporary since the setjmp buffer may be a local. */
1809 fp = copy_to_reg (fp);
1810 emit_stack_restore (SAVE_NONLOCAL, stack);
1811
1812 /* Ensure the frame pointer move is not optimized. */
1813 emit_insn (gen_blockage ());
1814 emit_clobber (hard_frame_pointer_rtx);
1815 emit_clobber (frame_pointer_rtx);
1816 emit_move_insn (hard_frame_pointer_rtx, fp);
1817
1818 emit_use (hard_frame_pointer_rtx);
1819 emit_use (stack_pointer_rtx);
1820 emit_indirect_jump (lab);
1821 }
1822 }
1823
1824 /* Search backwards and mark the jump insn as a non-local goto.
1825 Note that this precludes the use of __builtin_longjmp to a
1826 __builtin_setjmp target in the same function. However, we've
1827 already cautioned the user that these functions are for
1828 internal exception handling use only. */
1829 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1830 {
1831 gcc_assert (insn != last);
1832
1833 if (JUMP_P (insn))
1834 {
1835 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1836 break;
1837 }
1838 else if (CALL_P (insn))
1839 break;
1840 }
1841 }
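/* For illustration, the intended pairing of the two builtins handled above
   (sketch; function names are hypothetical).  BUF is the five-word buffer
   described above, and the second argument to __builtin_longjmp must be 1,
   matching the assertion in expand_builtin_longjmp.  As the comments note,
   these builtins are meant for internal exception-handling use only.

     void *buf[5];

     void
     thrower (void)
     {
       __builtin_longjmp (buf, 1);   // does not return
     }

     int
     catcher (void)
     {
       if (__builtin_setjmp (buf))
         return 1;                   // reached via __builtin_longjmp
       thrower ();
       return 0;
     }
*/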
1842
1843 static inline bool
1844 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1845 {
1846 return (iter->i < iter->n);
1847 }
1848
1849 /* This function validates the types of a function call argument list
1850 against a specified list of tree_codes. If the last specifier is a 0,
1851 that represents an ellipsis; otherwise the last specifier must be a
1852 VOID_TYPE. */
1853
1854 static bool
1855 validate_arglist (const_tree callexpr, ...)
1856 {
1857 enum tree_code code;
1858 bool res = false;
1859 va_list ap;
1860 const_call_expr_arg_iterator iter;
1861 const_tree arg;
1862
1863 va_start (ap, callexpr);
1864 init_const_call_expr_arg_iterator (callexpr, &iter);
1865
1866 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1867 tree fn = CALL_EXPR_FN (callexpr);
1868 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1869
1870 for (unsigned argno = 1; ; ++argno)
1871 {
1872 code = (enum tree_code) va_arg (ap, int);
1873
1874 switch (code)
1875 {
1876 case 0:
1877 /* This signifies an ellipsis; any further arguments are all OK. */
1878 res = true;
1879 goto end;
1880 case VOID_TYPE:
1881 /* This signifies an endlink; if no arguments remain, return
1882 true, otherwise return false. */
1883 res = !more_const_call_expr_args_p (&iter);
1884 goto end;
1885 case POINTER_TYPE:
1886 /* The actual argument must be nonnull when either the whole
1887 called function has been declared nonnull, or when the formal
1888 argument corresponding to the actual argument has been. */
1889 if (argmap
1890 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1891 {
1892 arg = next_const_call_expr_arg (&iter);
1893 if (!validate_arg (arg, code) || integer_zerop (arg))
1894 goto end;
1895 break;
1896 }
1897 /* FALLTHRU */
1898 default:
1899 /* If no parameters remain or the parameter's code does not
1900 match the specified code, return false. Otherwise continue
1901 checking any remaining arguments. */
1902 arg = next_const_call_expr_arg (&iter);
1903 if (!validate_arg (arg, code))
1904 goto end;
1905 break;
1906 }
1907 }
1908
1909 /* We need gotos here since we can only have one va_end in a
1910 function. */
1911 end: ;
1912 va_end (ap);
1913
1914 BITMAP_FREE (argmap);
1915
1916 return res;
1917 }
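/* For illustration, a typical caller of validate_arglist, as used by the
   expanders below (sketch).  The list is terminated either by VOID_TYPE
   (exact arity) or by 0 (trailing ellipsis); POINTER_TYPE entries also get
   the nonnull check implemented above.

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;
*/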
1918
1919 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1920 and the address of the save area. */
1921
1922 static rtx
1923 expand_builtin_nonlocal_goto (tree exp)
1924 {
1925 tree t_label, t_save_area;
1926 rtx r_label, r_save_area, r_fp, r_sp;
1927 rtx_insn *insn;
1928
1929 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1930 return NULL_RTX;
1931
1932 t_label = CALL_EXPR_ARG (exp, 0);
1933 t_save_area = CALL_EXPR_ARG (exp, 1);
1934
1935 r_label = expand_normal (t_label);
1936 r_label = convert_memory_address (Pmode, r_label);
1937 r_save_area = expand_normal (t_save_area);
1938 r_save_area = convert_memory_address (Pmode, r_save_area);
1939 /* Copy the address of the save location to a register just in case it was
1940 based on the frame pointer. */
1941 r_save_area = copy_to_reg (r_save_area);
1942 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1943 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1944 plus_constant (Pmode, r_save_area,
1945 GET_MODE_SIZE (Pmode)));
1946
1947 crtl->has_nonlocal_goto = 1;
1948
1949 /* ??? We no longer need to pass the static chain value, afaik. */
1950 if (targetm.have_nonlocal_goto ())
1951 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1952 else
1953 {
1954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1956
1957 r_label = copy_to_reg (r_label);
1958
1959 /* Restore the frame pointer and stack pointer. We must use a
1960 temporary since the setjmp buffer may be a local. */
1961 r_fp = copy_to_reg (r_fp);
1962 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1963
1964 /* Ensure the frame pointer move is not optimized. */
1965 emit_insn (gen_blockage ());
1966 emit_clobber (hard_frame_pointer_rtx);
1967 emit_clobber (frame_pointer_rtx);
1968 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1969
1970 /* USE of hard_frame_pointer_rtx added for consistency;
1971 not clear if really needed. */
1972 emit_use (hard_frame_pointer_rtx);
1973 emit_use (stack_pointer_rtx);
1974
1975 /* If the architecture is using a GP register, we must
1976 conservatively assume that the target function makes use of it.
1977 The prologue of functions with nonlocal gotos must therefore
1978 initialize the GP register to the appropriate value, and we
1979 must then make sure that this value is live at the point
1980 of the jump. (Note that this doesn't necessarily apply
1981 to targets with a nonlocal_goto pattern; they are free
1982 to implement it in their own way. Note also that this is
1983 a no-op if the GP register is a global invariant.) */
1984 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1985 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1986 emit_use (pic_offset_table_rtx);
1987
1988 emit_indirect_jump (r_label);
1989 }
1990
1991 /* Search backwards to the jump insn and mark it as a
1992 non-local goto. */
1993 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1994 {
1995 if (JUMP_P (insn))
1996 {
1997 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1998 break;
1999 }
2000 else if (CALL_P (insn))
2001 break;
2002 }
2003
2004 return const0_rtx;
2005 }
2006
2007 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2008 (not all will be used on all machines) that was passed to __builtin_setjmp.
2009 It updates the stack pointer in that block to the current value. This is
2010 also called directly by the SJLJ exception handling code. */
2011
2012 void
2013 expand_builtin_update_setjmp_buf (rtx buf_addr)
2014 {
2015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2016 buf_addr = convert_memory_address (Pmode, buf_addr);
2017 rtx stack_save
2018 = gen_rtx_MEM (sa_mode,
2019 memory_address
2020 (sa_mode,
2021 plus_constant (Pmode, buf_addr,
2022 2 * GET_MODE_SIZE (Pmode))));
2023
2024 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2025 }
2026
2027 /* Expand a call to __builtin_prefetch. For a target that does not support
2028 data prefetch, evaluate the memory address argument in case it has side
2029 effects. */
2030
2031 static void
2032 expand_builtin_prefetch (tree exp)
2033 {
2034 tree arg0, arg1, arg2;
2035 int nargs;
2036 rtx op0, op1, op2;
2037
2038 if (!validate_arglist (exp, POINTER_TYPE, 0))
2039 return;
2040
2041 arg0 = CALL_EXPR_ARG (exp, 0);
2042
2043 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2044 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2045 locality). */
2046 nargs = call_expr_nargs (exp);
2047 if (nargs > 1)
2048 arg1 = CALL_EXPR_ARG (exp, 1);
2049 else
2050 arg1 = integer_zero_node;
2051 if (nargs > 2)
2052 arg2 = CALL_EXPR_ARG (exp, 2);
2053 else
2054 arg2 = integer_three_node;
2055
2056 /* Argument 0 is an address. */
2057 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2058
2059 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2060 if (TREE_CODE (arg1) != INTEGER_CST)
2061 {
2062 error ("second argument to %<__builtin_prefetch%> must be a constant");
2063 arg1 = integer_zero_node;
2064 }
2065 op1 = expand_normal (arg1);
2066 /* Argument 1 must be either zero or one. */
2067 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2068 {
2069 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2070 " using zero");
2071 op1 = const0_rtx;
2072 }
2073
2074 /* Argument 2 (locality) must be a compile-time constant int. */
2075 if (TREE_CODE (arg2) != INTEGER_CST)
2076 {
2077 error ("third argument to %<__builtin_prefetch%> must be a constant");
2078 arg2 = integer_zero_node;
2079 }
2080 op2 = expand_normal (arg2);
2081 /* Argument 2 must be 0, 1, 2, or 3. */
2082 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2083 {
2084 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2085 op2 = const0_rtx;
2086 }
2087
2088 if (targetm.have_prefetch ())
2089 {
2090 class expand_operand ops[3];
2091
2092 create_address_operand (&ops[0], op0);
2093 create_integer_operand (&ops[1], INTVAL (op1));
2094 create_integer_operand (&ops[2], INTVAL (op2));
2095 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2096 return;
2097 }
2098
2099 /* Don't do anything with direct references to volatile memory, but
2100 generate code to handle other side effects. */
2101 if (!MEM_P (op0) && side_effects_p (op0))
2102 emit_insn (op0);
2103 }
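/* For illustration, the source-level form of the builtin expanded above
   (sketch).  The second argument is 0 (read) or 1 (write) and the third a
   locality hint from 0 to 3; both must be compile-time constants, as checked
   above.

     __builtin_prefetch (&a[i + 8]);        // read, locality 3 (the defaults)
     __builtin_prefetch (&a[i + 8], 1, 1);  // write, low temporal locality
*/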
2104
2105 /* Get a MEM rtx for expression EXP which is the address of an operand
2106 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
2107 the maximum length of the block of memory that might be accessed or
2108 NULL if unknown. */
2109
2110 static rtx
2111 get_memory_rtx (tree exp, tree len)
2112 {
2113 tree orig_exp = exp;
2114 rtx addr, mem;
2115
2116 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
2117 from its expression; for expr->a.b only <variable>.a.b is recorded. */
2118 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2119 exp = TREE_OPERAND (exp, 0);
2120
2121 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2122 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2123
2124 /* Get an expression we can use to find the attributes to assign to MEM.
2125 First remove any nops. */
2126 while (CONVERT_EXPR_P (exp)
2127 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2128 exp = TREE_OPERAND (exp, 0);
2129
2130 /* Build a MEM_REF representing the whole accessed area as a byte blob
2131 (builtin stringops may alias with anything). */
2132 exp = fold_build2 (MEM_REF,
2133 build_array_type (char_type_node,
2134 build_range_type (sizetype,
2135 size_one_node, len)),
2136 exp, build_int_cst (ptr_type_node, 0));
2137
2138 /* If the MEM_REF has no acceptable address, try to get the base object
2139 from the original address we got, and build an all-aliasing
2140 unknown-sized access to that one. */
2141 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2142 set_mem_attributes (mem, exp, 0);
2143 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2144 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2145 0))))
2146 {
2147 exp = build_fold_addr_expr (exp);
2148 exp = fold_build2 (MEM_REF,
2149 build_array_type (char_type_node,
2150 build_range_type (sizetype,
2151 size_zero_node,
2152 NULL)),
2153 exp, build_int_cst (ptr_type_node, 0));
2154 set_mem_attributes (mem, exp, 0);
2155 }
2156 set_mem_alias_set (mem, 0);
2157 return mem;
2158 }
2159 \f
2160 /* Built-in functions to perform an untyped call and return. */
2161
2162 #define apply_args_mode \
2163 (this_target_builtins->x_apply_args_mode)
2164 #define apply_result_mode \
2165 (this_target_builtins->x_apply_result_mode)
2166
2167 /* Return the size required for the block returned by __builtin_apply_args,
2168 and initialize apply_args_mode. */
2169
2170 static int
2171 apply_args_size (void)
2172 {
2173 static int size = -1;
2174 int align;
2175 unsigned int regno;
2176
2177 /* The values computed by this function never change. */
2178 if (size < 0)
2179 {
2180 /* The first value is the incoming arg-pointer. */
2181 size = GET_MODE_SIZE (Pmode);
2182
2183 /* The second value is the structure value address unless this is
2184 passed as an "invisible" first argument. */
2185 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2186 size += GET_MODE_SIZE (Pmode);
2187
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 if (FUNCTION_ARG_REGNO_P (regno))
2190 {
2191 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2192
2193 gcc_assert (mode != VOIDmode);
2194
2195 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2196 if (size % align != 0)
2197 size = CEIL (size, align) * align;
2198 size += GET_MODE_SIZE (mode);
2199 apply_args_mode[regno] = mode;
2200 }
2201 else
2202 {
2203 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2204 }
2205 }
2206 return size;
2207 }
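/* For illustration, a worked instance of the size computed above, on a
   hypothetical target: Pmode is 8 bytes, struct_value_rtx returns a register
   (so the struct-value slot is present), and there are two 8-byte argument
   registers.  size starts at 8 (arg pointer), becomes 16 (struct-value
   slot), then each register slot is padded with CEIL (size, align) * align
   before adding GET_MODE_SIZE (mode), giving 16 + 8 + 8 = 32 bytes.  */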
2208
2209 /* Return the size required for the block returned by __builtin_apply,
2210 and initialize apply_result_mode. */
2211
2212 static int
2213 apply_result_size (void)
2214 {
2215 static int size = -1;
2216 int align, regno;
2217
2218 /* The values computed by this function never change. */
2219 if (size < 0)
2220 {
2221 size = 0;
2222
2223 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2224 if (targetm.calls.function_value_regno_p (regno))
2225 {
2226 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2227
2228 gcc_assert (mode != VOIDmode);
2229
2230 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2231 if (size % align != 0)
2232 size = CEIL (size, align) * align;
2233 size += GET_MODE_SIZE (mode);
2234 apply_result_mode[regno] = mode;
2235 }
2236 else
2237 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2238
2239 /* Allow targets that use untyped_call and untyped_return to override
2240 the size so that machine-specific information can be stored here. */
2241 #ifdef APPLY_RESULT_SIZE
2242 size = APPLY_RESULT_SIZE;
2243 #endif
2244 }
2245 return size;
2246 }
2247
2248 /* Create a vector describing the result block RESULT. If SAVEP is true,
2249 the result block is used to save the values; otherwise it is used to
2250 restore the values. */
2251
2252 static rtx
2253 result_vector (int savep, rtx result)
2254 {
2255 int regno, size, align, nelts;
2256 fixed_size_mode mode;
2257 rtx reg, mem;
2258 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2259
2260 size = nelts = 0;
2261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2262 if ((mode = apply_result_mode[regno]) != VOIDmode)
2263 {
2264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2265 if (size % align != 0)
2266 size = CEIL (size, align) * align;
2267 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2268 mem = adjust_address (result, mode, size);
2269 savevec[nelts++] = (savep
2270 ? gen_rtx_SET (mem, reg)
2271 : gen_rtx_SET (reg, mem));
2272 size += GET_MODE_SIZE (mode);
2273 }
2274 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2275 }
2276
2277 /* Save the state required to perform an untyped call with the same
2278 arguments as were passed to the current function. */
2279
2280 static rtx
2281 expand_builtin_apply_args_1 (void)
2282 {
2283 rtx registers, tem;
2284 int size, align, regno;
2285 fixed_size_mode mode;
2286 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2287
2288 /* Create a block where the arg-pointer, structure value address,
2289 and argument registers can be saved. */
2290 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2291
2292 /* Walk past the arg-pointer and structure value address. */
2293 size = GET_MODE_SIZE (Pmode);
2294 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2295 size += GET_MODE_SIZE (Pmode);
2296
2297 /* Save each register used in calling a function to the block. */
2298 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2299 if ((mode = apply_args_mode[regno]) != VOIDmode)
2300 {
2301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2302 if (size % align != 0)
2303 size = CEIL (size, align) * align;
2304
2305 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2306
2307 emit_move_insn (adjust_address (registers, mode, size), tem);
2308 size += GET_MODE_SIZE (mode);
2309 }
2310
2311 /* Save the arg pointer to the block. */
2312 tem = copy_to_reg (crtl->args.internal_arg_pointer);
2313 /* We need the pointer as the caller actually passed the arguments to us,
2314 not as we might have pretended they were passed. Make sure it's a valid
2315 operand, as emit_move_insn isn't expected to handle a PLUS. */
2316 if (STACK_GROWS_DOWNWARD)
2317 tem
2318 = force_operand (plus_constant (Pmode, tem,
2319 crtl->args.pretend_args_size),
2320 NULL_RTX);
2321 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2322
2323 size = GET_MODE_SIZE (Pmode);
2324
2325 /* Save the structure value address unless this is passed as an
2326 "invisible" first argument. */
2327 if (struct_incoming_value)
2328 emit_move_insn (adjust_address (registers, Pmode, size),
2329 copy_to_reg (struct_incoming_value));
2330
2331 /* Return the address of the block. */
2332 return copy_addr_to_reg (XEXP (registers, 0));
2333 }
2334
2335 /* __builtin_apply_args returns block of memory allocated on
2336 the stack into which is stored the arg pointer, structure
2337 value address, static chain, and all the registers that might
2338 possibly be used in performing a function call. The code is
2339 moved to the start of the function so the incoming values are
2340 saved. */
2341
2342 static rtx
2343 expand_builtin_apply_args (void)
2344 {
2345 /* Don't do __builtin_apply_args more than once in a function.
2346 Save the result of the first call and reuse it. */
2347 if (apply_args_value != 0)
2348 return apply_args_value;
2349 {
2350 /* When this function is called, it means that registers must be
2351 saved on entry to this function. So we migrate the
2352 call to the first insn of this function. */
2353 rtx temp;
2354
2355 start_sequence ();
2356 temp = expand_builtin_apply_args_1 ();
2357 rtx_insn *seq = get_insns ();
2358 end_sequence ();
2359
2360 apply_args_value = temp;
2361
2362 /* Put the insns after the NOTE that starts the function.
2363 If this is inside a start_sequence, make the outer-level insn
2364 chain current, so the code is placed at the start of the
2365 function. If internal_arg_pointer is a non-virtual pseudo,
2366 it needs to be placed after the function that initializes
2367 that pseudo. */
2368 push_topmost_sequence ();
2369 if (REG_P (crtl->args.internal_arg_pointer)
2370 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2371 emit_insn_before (seq, parm_birth_insn);
2372 else
2373 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2374 pop_topmost_sequence ();
2375 return temp;
2376 }
2377 }
2378
2379 /* Perform an untyped call and save the state required to perform an
2380 untyped return of whatever value was returned by the given function. */
2381
2382 static rtx
2383 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2384 {
2385 int size, align, regno;
2386 fixed_size_mode mode;
2387 rtx incoming_args, result, reg, dest, src;
2388 rtx_call_insn *call_insn;
2389 rtx old_stack_level = 0;
2390 rtx call_fusage = 0;
2391 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2392
2393 arguments = convert_memory_address (Pmode, arguments);
2394
2395 /* Create a block where the return registers can be saved. */
2396 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2397
2398 /* Fetch the arg pointer from the ARGUMENTS block. */
2399 incoming_args = gen_reg_rtx (Pmode);
2400 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2401 if (!STACK_GROWS_DOWNWARD)
2402 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2403 incoming_args, 0, OPTAB_LIB_WIDEN);
2404
2405 /* Push a new argument block and copy the arguments. Do not allow
2406 the (potential) memcpy call below to interfere with our stack
2407 manipulations. */
2408 do_pending_stack_adjust ();
2409 NO_DEFER_POP;
2410
2411 /* Save the stack with nonlocal if available. */
2412 if (targetm.have_save_stack_nonlocal ())
2413 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2414 else
2415 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2416
2417 /* Allocate a block of memory onto the stack and copy the memory
2418 arguments to the outgoing arguments address. We can pass TRUE
2419 as the 4th argument because we just saved the stack pointer
2420 and will restore it right after the call. */
2421 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2422
2423 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2424 may have already set current_function_calls_alloca to true.
2425 current_function_calls_alloca won't be set if argsize is zero,
2426 so we have to guarantee need_drap is true here. */
2427 if (SUPPORTS_STACK_ALIGNMENT)
2428 crtl->need_drap = true;
2429
2430 dest = virtual_outgoing_args_rtx;
2431 if (!STACK_GROWS_DOWNWARD)
2432 {
2433 if (CONST_INT_P (argsize))
2434 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2435 else
2436 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2437 }
2438 dest = gen_rtx_MEM (BLKmode, dest);
2439 set_mem_align (dest, PARM_BOUNDARY);
2440 src = gen_rtx_MEM (BLKmode, incoming_args);
2441 set_mem_align (src, PARM_BOUNDARY);
2442 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2443
2444 /* Refer to the argument block. */
2445 apply_args_size ();
2446 arguments = gen_rtx_MEM (BLKmode, arguments);
2447 set_mem_align (arguments, PARM_BOUNDARY);
2448
2449 /* Walk past the arg-pointer and structure value address. */
2450 size = GET_MODE_SIZE (Pmode);
2451 if (struct_value)
2452 size += GET_MODE_SIZE (Pmode);
2453
2454 /* Restore each of the registers previously saved. Make USE insns
2455 for each of these registers for use in making the call. */
2456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2457 if ((mode = apply_args_mode[regno]) != VOIDmode)
2458 {
2459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2460 if (size % align != 0)
2461 size = CEIL (size, align) * align;
2462 reg = gen_rtx_REG (mode, regno);
2463 emit_move_insn (reg, adjust_address (arguments, mode, size));
2464 use_reg (&call_fusage, reg);
2465 size += GET_MODE_SIZE (mode);
2466 }
2467
2468 /* Restore the structure value address unless this is passed as an
2469 "invisible" first argument. */
2470 size = GET_MODE_SIZE (Pmode);
2471 if (struct_value)
2472 {
2473 rtx value = gen_reg_rtx (Pmode);
2474 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2475 emit_move_insn (struct_value, value);
2476 if (REG_P (struct_value))
2477 use_reg (&call_fusage, struct_value);
2478 }
2479
2480 /* All arguments and registers used for the call are set up by now! */
2481 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2482
2483 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
2484 and we don't want to load it into a register as an optimization,
2485 because prepare_call_address already did it if it should be done. */
2486 if (GET_CODE (function) != SYMBOL_REF)
2487 function = memory_address (FUNCTION_MODE, function);
2488
2489 /* Generate the actual call instruction and save the return value. */
2490 if (targetm.have_untyped_call ())
2491 {
2492 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2493 emit_call_insn (targetm.gen_untyped_call (mem, result,
2494 result_vector (1, result)));
2495 }
2496 else if (targetm.have_call_value ())
2497 {
2498 rtx valreg = 0;
2499
2500 /* Locate the unique return register. It is not possible to
2501 express a call that sets more than one return register using
2502 call_value; use untyped_call for that. In fact, untyped_call
2503 only needs to save the return registers in the given block. */
2504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2505 if ((mode = apply_result_mode[regno]) != VOIDmode)
2506 {
2507 gcc_assert (!valreg); /* have_untyped_call required. */
2508
2509 valreg = gen_rtx_REG (mode, regno);
2510 }
2511
2512 emit_insn (targetm.gen_call_value (valreg,
2513 gen_rtx_MEM (FUNCTION_MODE, function),
2514 const0_rtx, NULL_RTX, const0_rtx));
2515
2516 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2517 }
2518 else
2519 gcc_unreachable ();
2520
2521 /* Find the CALL insn we just emitted, and attach the register usage
2522 information. */
2523 call_insn = last_call_insn ();
2524 add_function_usage_to (call_insn, call_fusage);
2525
2526 /* Restore the stack. */
2527 if (targetm.have_save_stack_nonlocal ())
2528 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2529 else
2530 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2531 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2532
2533 OK_DEFER_POP;
2534
2535 /* Return the address of the result block. */
2536 result = copy_addr_to_reg (XEXP (result, 0));
2537 return convert_memory_address (ptr_mode, result);
2538 }
2539
2540 /* Perform an untyped return. */
2541
2542 static void
2543 expand_builtin_return (rtx result)
2544 {
2545 int size, align, regno;
2546 fixed_size_mode mode;
2547 rtx reg;
2548 rtx_insn *call_fusage = 0;
2549
2550 result = convert_memory_address (Pmode, result);
2551
2552 apply_result_size ();
2553 result = gen_rtx_MEM (BLKmode, result);
2554
2555 if (targetm.have_untyped_return ())
2556 {
2557 rtx vector = result_vector (0, result);
2558 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2559 emit_barrier ();
2560 return;
2561 }
2562
2563 /* Restore the return value and note that each value is used. */
2564 size = 0;
2565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2566 if ((mode = apply_result_mode[regno]) != VOIDmode)
2567 {
2568 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2569 if (size % align != 0)
2570 size = CEIL (size, align) * align;
2571 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2572 emit_move_insn (reg, adjust_address (result, mode, size));
2573
2574 push_to_sequence (call_fusage);
2575 emit_use (reg);
2576 call_fusage = get_insns ();
2577 end_sequence ();
2578 size += GET_MODE_SIZE (mode);
2579 }
2580
2581 /* Put the USE insns before the return. */
2582 emit_insn (call_fusage);
2583
2584 /* Return whatever values were restored by jumping directly to the end
2585 of the function. */
2586 expand_naked_return ();
2587 }
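/* For illustration, the three builtins expanded above are intended to be
   used together to forward a call with unknown arguments, e.g. in a tracing
   wrapper (sketch; names and the 128-byte argument-size bound are
   hypothetical, chosen by the caller).

     void real_function ();

     void
     wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) real_function, args, 128);
       __builtin_return (result);
     }
*/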
2588
2589 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2590
2591 static enum type_class
2592 type_to_class (tree type)
2593 {
2594 switch (TREE_CODE (type))
2595 {
2596 case VOID_TYPE: return void_type_class;
2597 case INTEGER_TYPE: return integer_type_class;
2598 case ENUMERAL_TYPE: return enumeral_type_class;
2599 case BOOLEAN_TYPE: return boolean_type_class;
2600 case POINTER_TYPE: return pointer_type_class;
2601 case REFERENCE_TYPE: return reference_type_class;
2602 case OFFSET_TYPE: return offset_type_class;
2603 case REAL_TYPE: return real_type_class;
2604 case COMPLEX_TYPE: return complex_type_class;
2605 case FUNCTION_TYPE: return function_type_class;
2606 case METHOD_TYPE: return method_type_class;
2607 case RECORD_TYPE: return record_type_class;
2608 case UNION_TYPE:
2609 case QUAL_UNION_TYPE: return union_type_class;
2610 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2611 ? string_type_class : array_type_class);
2612 case LANG_TYPE: return lang_type_class;
2613 case OPAQUE_TYPE: return opaque_type_class;
2614 default: return no_type_class;
2615 }
2616 }
2617
2618 /* Expand a call EXP to __builtin_classify_type. */
2619
2620 static rtx
2621 expand_builtin_classify_type (tree exp)
2622 {
2623 if (call_expr_nargs (exp))
2624 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2625 return GEN_INT (no_type_class);
2626 }
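/* For illustration, __builtin_classify_type maps the type of its argument to
   the type_class enumeration returned by type_to_class above (sketch).

     int c1 = __builtin_classify_type (1.0);        // real_type_class
     int c2 = __builtin_classify_type ((int *) 0);  // pointer_type_class
*/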
2627
2628 /* This helper macro, meant to be used in mathfn_built_in below, determines
2629 which among a set of builtin math functions is appropriate for a given type
2630 mode. The `F' (float) and `L' (long double) are automatically generated
2631 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2632 types, there are additional types that are considered with 'F32', 'F64',
2633 'F128', etc. suffixes. */
2634 #define CASE_MATHFN(MATHFN) \
2635 CASE_CFN_##MATHFN: \
2636 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2637 fcodel = BUILT_IN_##MATHFN##L ; break;
2638 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2639 types. */
2640 #define CASE_MATHFN_FLOATN(MATHFN) \
2641 CASE_CFN_##MATHFN: \
2642 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2643 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2644 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2645 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2646 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2647 break;
2648 /* Similar to above, but appends _R after any F/L suffix. */
2649 #define CASE_MATHFN_REENT(MATHFN) \
2650 case CFN_BUILT_IN_##MATHFN##_R: \
2651 case CFN_BUILT_IN_##MATHFN##F_R: \
2652 case CFN_BUILT_IN_##MATHFN##L_R: \
2653 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2654 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2655
2656 /* Return a function equivalent to FN but operating on floating-point
2657 values of type TYPE, or END_BUILTINS if no such function exists.
2658 This is purely an operation on function codes; it does not guarantee
2659 that the target actually has an implementation of the function. */
2660
2661 static built_in_function
2662 mathfn_built_in_2 (tree type, combined_fn fn)
2663 {
2664 tree mtype;
2665 built_in_function fcode, fcodef, fcodel;
2666 built_in_function fcodef16 = END_BUILTINS;
2667 built_in_function fcodef32 = END_BUILTINS;
2668 built_in_function fcodef64 = END_BUILTINS;
2669 built_in_function fcodef128 = END_BUILTINS;
2670 built_in_function fcodef32x = END_BUILTINS;
2671 built_in_function fcodef64x = END_BUILTINS;
2672 built_in_function fcodef128x = END_BUILTINS;
2673
2674 switch (fn)
2675 {
2676 #define SEQ_OF_CASE_MATHFN \
2677 CASE_MATHFN (ACOS) \
2678 CASE_MATHFN (ACOSH) \
2679 CASE_MATHFN (ASIN) \
2680 CASE_MATHFN (ASINH) \
2681 CASE_MATHFN (ATAN) \
2682 CASE_MATHFN (ATAN2) \
2683 CASE_MATHFN (ATANH) \
2684 CASE_MATHFN (CBRT) \
2685 CASE_MATHFN_FLOATN (CEIL) \
2686 CASE_MATHFN (CEXPI) \
2687 CASE_MATHFN_FLOATN (COPYSIGN) \
2688 CASE_MATHFN (COS) \
2689 CASE_MATHFN (COSH) \
2690 CASE_MATHFN (DREM) \
2691 CASE_MATHFN (ERF) \
2692 CASE_MATHFN (ERFC) \
2693 CASE_MATHFN (EXP) \
2694 CASE_MATHFN (EXP10) \
2695 CASE_MATHFN (EXP2) \
2696 CASE_MATHFN (EXPM1) \
2697 CASE_MATHFN (FABS) \
2698 CASE_MATHFN (FDIM) \
2699 CASE_MATHFN_FLOATN (FLOOR) \
2700 CASE_MATHFN_FLOATN (FMA) \
2701 CASE_MATHFN_FLOATN (FMAX) \
2702 CASE_MATHFN_FLOATN (FMIN) \
2703 CASE_MATHFN (FMOD) \
2704 CASE_MATHFN (FREXP) \
2705 CASE_MATHFN (GAMMA) \
2706 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2707 CASE_MATHFN (HUGE_VAL) \
2708 CASE_MATHFN (HYPOT) \
2709 CASE_MATHFN (ILOGB) \
2710 CASE_MATHFN (ICEIL) \
2711 CASE_MATHFN (IFLOOR) \
2712 CASE_MATHFN (INF) \
2713 CASE_MATHFN (IRINT) \
2714 CASE_MATHFN (IROUND) \
2715 CASE_MATHFN (ISINF) \
2716 CASE_MATHFN (J0) \
2717 CASE_MATHFN (J1) \
2718 CASE_MATHFN (JN) \
2719 CASE_MATHFN (LCEIL) \
2720 CASE_MATHFN (LDEXP) \
2721 CASE_MATHFN (LFLOOR) \
2722 CASE_MATHFN (LGAMMA) \
2723 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2724 CASE_MATHFN (LLCEIL) \
2725 CASE_MATHFN (LLFLOOR) \
2726 CASE_MATHFN (LLRINT) \
2727 CASE_MATHFN (LLROUND) \
2728 CASE_MATHFN (LOG) \
2729 CASE_MATHFN (LOG10) \
2730 CASE_MATHFN (LOG1P) \
2731 CASE_MATHFN (LOG2) \
2732 CASE_MATHFN (LOGB) \
2733 CASE_MATHFN (LRINT) \
2734 CASE_MATHFN (LROUND) \
2735 CASE_MATHFN (MODF) \
2736 CASE_MATHFN (NAN) \
2737 CASE_MATHFN (NANS) \
2738 CASE_MATHFN_FLOATN (NEARBYINT) \
2739 CASE_MATHFN (NEXTAFTER) \
2740 CASE_MATHFN (NEXTTOWARD) \
2741 CASE_MATHFN (POW) \
2742 CASE_MATHFN (POWI) \
2743 CASE_MATHFN (POW10) \
2744 CASE_MATHFN (REMAINDER) \
2745 CASE_MATHFN (REMQUO) \
2746 CASE_MATHFN_FLOATN (RINT) \
2747 CASE_MATHFN_FLOATN (ROUND) \
2748 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2749 CASE_MATHFN (SCALB) \
2750 CASE_MATHFN (SCALBLN) \
2751 CASE_MATHFN (SCALBN) \
2752 CASE_MATHFN (SIGNBIT) \
2753 CASE_MATHFN (SIGNIFICAND) \
2754 CASE_MATHFN (SIN) \
2755 CASE_MATHFN (SINCOS) \
2756 CASE_MATHFN (SINH) \
2757 CASE_MATHFN_FLOATN (SQRT) \
2758 CASE_MATHFN (TAN) \
2759 CASE_MATHFN (TANH) \
2760 CASE_MATHFN (TGAMMA) \
2761 CASE_MATHFN_FLOATN (TRUNC) \
2762 CASE_MATHFN (Y0) \
2763 CASE_MATHFN (Y1) \
2764 CASE_MATHFN (YN)
2765
2766 SEQ_OF_CASE_MATHFN
2767
2768 default:
2769 return END_BUILTINS;
2770 }
2771
2772 mtype = TYPE_MAIN_VARIANT (type);
2773 if (mtype == double_type_node)
2774 return fcode;
2775 else if (mtype == float_type_node)
2776 return fcodef;
2777 else if (mtype == long_double_type_node)
2778 return fcodel;
2779 else if (mtype == float16_type_node)
2780 return fcodef16;
2781 else if (mtype == float32_type_node)
2782 return fcodef32;
2783 else if (mtype == float64_type_node)
2784 return fcodef64;
2785 else if (mtype == float128_type_node)
2786 return fcodef128;
2787 else if (mtype == float32x_type_node)
2788 return fcodef32x;
2789 else if (mtype == float64x_type_node)
2790 return fcodef64x;
2791 else if (mtype == float128x_type_node)
2792 return fcodef128x;
2793 else
2794 return END_BUILTINS;
2795 }
2796
2797 #undef CASE_MATHFN
2798 #undef CASE_MATHFN_FLOATN
2799 #undef CASE_MATHFN_REENT
2800
2801 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2802 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2803 otherwise use the explicit declaration. If we can't do the conversion,
2804 return null. */
2805
2806 static tree
2807 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2808 {
2809 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2810 if (fcode2 == END_BUILTINS)
2811 return NULL_TREE;
2812
2813 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2814 return NULL_TREE;
2815
2816 return builtin_decl_explicit (fcode2);
2817 }
2818
2819 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2820
2821 tree
2822 mathfn_built_in (tree type, combined_fn fn)
2823 {
2824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2825 }
2826
2827 /* Like mathfn_built_in_1, but take a built_in_function and
2828 always use the implicit builtin declarations. */
2829
2830 tree
2831 mathfn_built_in (tree type, enum built_in_function fn)
2832 {
2833 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2834 }
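/* For illustration, how folders and expanders in this file typically use
   mathfn_built_in, here to pick the sqrt variant matching an argument's type
   (sqrtf, sqrt or sqrtl); NULL_TREE is returned when no suitable implicit
   builtin exists (sketch).

     tree sqrtfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (sqrtfn)
       ... build a call to sqrtfn ...
*/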
2835
2836 /* Return the type associated with a built in function, i.e., the one
2837 to be passed to mathfn_built_in to get the type-specific
2838 function. */
2839
2840 tree
2841 mathfn_built_in_type (combined_fn fn)
2842 {
2843 #define CASE_MATHFN(MATHFN) \
2844 case CFN_BUILT_IN_##MATHFN: \
2845 return double_type_node; \
2846 case CFN_BUILT_IN_##MATHFN##F: \
2847 return float_type_node; \
2848 case CFN_BUILT_IN_##MATHFN##L: \
2849 return long_double_type_node;
2850
2851 #define CASE_MATHFN_FLOATN(MATHFN) \
2852 CASE_MATHFN(MATHFN) \
2853 case CFN_BUILT_IN_##MATHFN##F16: \
2854 return float16_type_node; \
2855 case CFN_BUILT_IN_##MATHFN##F32: \
2856 return float32_type_node; \
2857 case CFN_BUILT_IN_##MATHFN##F64: \
2858 return float64_type_node; \
2859 case CFN_BUILT_IN_##MATHFN##F128: \
2860 return float128_type_node; \
2861 case CFN_BUILT_IN_##MATHFN##F32X: \
2862 return float32x_type_node; \
2863 case CFN_BUILT_IN_##MATHFN##F64X: \
2864 return float64x_type_node; \
2865 case CFN_BUILT_IN_##MATHFN##F128X: \
2866 return float128x_type_node;
2867
2868 /* Similar to above, but appends _R after any F/L suffix. */
2869 #define CASE_MATHFN_REENT(MATHFN) \
2870 case CFN_BUILT_IN_##MATHFN##_R: \
2871 return double_type_node; \
2872 case CFN_BUILT_IN_##MATHFN##F_R: \
2873 return float_type_node; \
2874 case CFN_BUILT_IN_##MATHFN##L_R: \
2875 return long_double_type_node;
2876
2877 switch (fn)
2878 {
2879 SEQ_OF_CASE_MATHFN
2880
2881 default:
2882 return NULL_TREE;
2883 }
2884
2885 #undef CASE_MATHFN
2886 #undef CASE_MATHFN_FLOATN
2887 #undef CASE_MATHFN_REENT
2888 #undef SEQ_OF_CASE_MATHFN
2889 }
2890
2891 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2892 return its code, otherwise return IFN_LAST. Note that this function
2893 only tests whether the function is defined in internal-fn.def, not whether
2894 it is actually available on the target. */
2895
2896 internal_fn
2897 associated_internal_fn (tree fndecl)
2898 {
2899 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2900 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2901 switch (DECL_FUNCTION_CODE (fndecl))
2902 {
2903 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2904 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2905 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2906 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2907 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2908 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2909 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2910 #include "internal-fn.def"
2911
2912 CASE_FLT_FN (BUILT_IN_POW10):
2913 return IFN_EXP10;
2914
2915 CASE_FLT_FN (BUILT_IN_DREM):
2916 return IFN_REMAINDER;
2917
2918 CASE_FLT_FN (BUILT_IN_SCALBN):
2919 CASE_FLT_FN (BUILT_IN_SCALBLN):
2920 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2921 return IFN_LDEXP;
2922 return IFN_LAST;
2923
2924 default:
2925 return IFN_LAST;
2926 }
2927 }
2928
2929 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2930 on the current target by a call to an internal function, return the
2931 code of that internal function, otherwise return IFN_LAST. The caller
2932 is responsible for ensuring that any side-effects of the built-in
2933 call are dealt with correctly. E.g. if CALL sets errno, the caller
2934 must decide that the errno result isn't needed or make it available
2935 in some other way. */
2936
2937 internal_fn
2938 replacement_internal_fn (gcall *call)
2939 {
2940 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2941 {
2942 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2943 if (ifn != IFN_LAST)
2944 {
2945 tree_pair types = direct_internal_fn_types (ifn, call);
2946 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2947 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2948 return ifn;
2949 }
2950 }
2951 return IFN_LAST;
2952 }
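/* For illustration, a caller of replacement_internal_fn would look roughly
   like this (sketch), after establishing that the errno side effect of the
   original call is not needed:

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ... rebuild CALL as an internal-function call to ifn ...
*/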
2953
2954 /* Expand a call to the builtin ternary math functions (fma).
2955 Return NULL_RTX if a normal call should be emitted rather than expanding the
2956 function in-line. EXP is the expression that is a call to the builtin
2957 function; if convenient, the result should be placed in TARGET.
2958 SUBTARGET may be used as the target for computing one of EXP's
2959 operands. */
2960
2961 static rtx
2962 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2963 {
2964 optab builtin_optab;
2965 rtx op0, op1, op2, result;
2966 rtx_insn *insns;
2967 tree fndecl = get_callee_fndecl (exp);
2968 tree arg0, arg1, arg2;
2969 machine_mode mode;
2970
2971 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2973
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2976 arg2 = CALL_EXPR_ARG (exp, 2);
2977
2978 switch (DECL_FUNCTION_CODE (fndecl))
2979 {
2980 CASE_FLT_FN (BUILT_IN_FMA):
2981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2982 builtin_optab = fma_optab; break;
2983 default:
2984 gcc_unreachable ();
2985 }
2986
2987 /* Make a suitable register to place result in. */
2988 mode = TYPE_MODE (TREE_TYPE (exp));
2989
2990 /* Before working hard, check whether the instruction is available. */
2991 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2992 return NULL_RTX;
2993
2994 result = gen_reg_rtx (mode);
2995
2996 /* Always stabilize the argument list. */
2997 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2998 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2999 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3000
3001 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3002 op1 = expand_normal (arg1);
3003 op2 = expand_normal (arg2);
3004
3005 start_sequence ();
3006
3007 /* Compute into RESULT.
3008 Set RESULT to wherever the result comes back. */
3009 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3010 result, 0);
3011
3012 /* If we were unable to expand via the builtin, stop the sequence
3013 (without outputting the insns) and emit a call to the library function
3014 with the stabilized argument list. */
3015 if (result == 0)
3016 {
3017 end_sequence ();
3018 return expand_call (exp, target, target == const0_rtx);
3019 }
3020
3021 /* Output the entire sequence. */
3022 insns = get_insns ();
3023 end_sequence ();
3024 emit_insn (insns);
3025
3026 return result;
3027 }
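/* For illustration, the user-level view of the fma family expanded above
   (sketch): a * b + c is computed with a single rounding, and when no fma
   pattern is available the code above falls back to a library call.

     double d = __builtin_fma (a, b, c);
     float  f = __builtin_fmaf (af, bf, cf);
*/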
3028
3029 /* Expand a call to the builtin sin and cos math functions.
3030 Return NULL_RTX if a normal call should be emitted rather than expanding the
3031 function in-line. EXP is the expression that is a call to the builtin
3032 function; if convenient, the result should be placed in TARGET.
3033 SUBTARGET may be used as the target for computing one of EXP's
3034 operands. */
3035
3036 static rtx
3037 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3038 {
3039 optab builtin_optab;
3040 rtx op0;
3041 rtx_insn *insns;
3042 tree fndecl = get_callee_fndecl (exp);
3043 machine_mode mode;
3044 tree arg;
3045
3046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3048
3049 arg = CALL_EXPR_ARG (exp, 0);
3050
3051 switch (DECL_FUNCTION_CODE (fndecl))
3052 {
3053 CASE_FLT_FN (BUILT_IN_SIN):
3054 CASE_FLT_FN (BUILT_IN_COS):
3055 builtin_optab = sincos_optab; break;
3056 default:
3057 gcc_unreachable ();
3058 }
3059
3060 /* Make a suitable register to place result in. */
3061 mode = TYPE_MODE (TREE_TYPE (exp));
3062
3063 /* Check if the sincos insn is available; otherwise fall back
3064 to the sin or cos insn. */
3065 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3066 switch (DECL_FUNCTION_CODE (fndecl))
3067 {
3068 CASE_FLT_FN (BUILT_IN_SIN):
3069 builtin_optab = sin_optab; break;
3070 CASE_FLT_FN (BUILT_IN_COS):
3071 builtin_optab = cos_optab; break;
3072 default:
3073 gcc_unreachable ();
3074 }
3075
3076 /* Before working hard, check whether the instruction is available. */
3077 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3078 {
3079 rtx result = gen_reg_rtx (mode);
3080
3081 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3082 need to expand the argument again. This way, we will not perform
3083 side-effects more than once. */
3084 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3085
3086 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3087
3088 start_sequence ();
3089
3090 /* Compute into RESULT.
3091 Set RESULT to wherever the result comes back. */
3092 if (builtin_optab == sincos_optab)
3093 {
3094 int ok;
3095
3096 switch (DECL_FUNCTION_CODE (fndecl))
3097 {
3098 CASE_FLT_FN (BUILT_IN_SIN):
3099 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3100 break;
3101 CASE_FLT_FN (BUILT_IN_COS):
3102 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3103 break;
3104 default:
3105 gcc_unreachable ();
3106 }
3107 gcc_assert (ok);
3108 }
3109 else
3110 result = expand_unop (mode, builtin_optab, op0, result, 0);
3111
3112 if (result != 0)
3113 {
3114 /* Output the entire sequence. */
3115 insns = get_insns ();
3116 end_sequence ();
3117 emit_insn (insns);
3118 return result;
3119 }
3120
3121 /* If we were unable to expand via the builtin, stop the sequence
3122 (without outputting the insns) and emit a call to the library function
3123 with the stabilized argument list. */
3124 end_sequence ();
3125 }
3126
3127 return expand_call (exp, target, target == const0_rtx);
3128 }
3129
3130 /* Given an interclass math builtin decl FNDECL and its argument ARG,
3131 return an RTL instruction code that implements the functionality.
3132 If that isn't possible or available return CODE_FOR_nothing. */
3133
3134 static enum insn_code
3135 interclass_mathfn_icode (tree arg, tree fndecl)
3136 {
3137 bool errno_set = false;
3138 optab builtin_optab = unknown_optab;
3139 machine_mode mode;
3140
3141 switch (DECL_FUNCTION_CODE (fndecl))
3142 {
3143 CASE_FLT_FN (BUILT_IN_ILOGB):
3144 errno_set = true; builtin_optab = ilogb_optab; break;
3145 CASE_FLT_FN (BUILT_IN_ISINF):
3146 builtin_optab = isinf_optab; break;
3147 case BUILT_IN_ISNORMAL:
3148 case BUILT_IN_ISFINITE:
3149 CASE_FLT_FN (BUILT_IN_FINITE):
3150 case BUILT_IN_FINITED32:
3151 case BUILT_IN_FINITED64:
3152 case BUILT_IN_FINITED128:
3153 case BUILT_IN_ISINFD32:
3154 case BUILT_IN_ISINFD64:
3155 case BUILT_IN_ISINFD128:
3156 /* These builtins have no optabs (yet). */
3157 break;
3158 default:
3159 gcc_unreachable ();
3160 }
3161
3162 /* There's no easy way to detect the case we need to set EDOM. */
3163 if (flag_errno_math && errno_set)
3164 return CODE_FOR_nothing;
3165
3166 /* Optab mode depends on the mode of the input argument. */
3167 mode = TYPE_MODE (TREE_TYPE (arg));
3168
3169 if (builtin_optab)
3170 return optab_handler (builtin_optab, mode);
3171 return CODE_FOR_nothing;
3172 }
3173
3174 /* Expand a call to one of the builtin math functions that operate on
3175 a floating-point argument and output an integer result (ilogb, isinf,
3176 isnan, etc).
3177 Return 0 if a normal call should be emitted rather than expanding the
3178 function in-line. EXP is the expression that is a call to the builtin
3179 function; if convenient, the result should be placed in TARGET. */
3180
3181 static rtx
3182 expand_builtin_interclass_mathfn (tree exp, rtx target)
3183 {
3184 enum insn_code icode = CODE_FOR_nothing;
3185 rtx op0;
3186 tree fndecl = get_callee_fndecl (exp);
3187 machine_mode mode;
3188 tree arg;
3189
3190 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3191 return NULL_RTX;
3192
3193 arg = CALL_EXPR_ARG (exp, 0);
3194 icode = interclass_mathfn_icode (arg, fndecl);
3195 mode = TYPE_MODE (TREE_TYPE (arg));
3196
3197 if (icode != CODE_FOR_nothing)
3198 {
3199 class expand_operand ops[1];
3200 rtx_insn *last = get_last_insn ();
3201 tree orig_arg = arg;
3202
3203 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3204 need to expand the argument again. This way, we will not perform
3205 side-effects more than once. */
3206 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3207
3208 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3209
3210 if (mode != GET_MODE (op0))
3211 op0 = convert_to_mode (mode, op0, 0);
3212
3213 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3214 if (maybe_legitimize_operands (icode, 0, 1, ops)
3215 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3216 return ops[0].value;
3217
3218 delete_insns_since (last);
3219 CALL_EXPR_ARG (exp, 0) = orig_arg;
3220 }
3221
3222 return NULL_RTX;
3223 }
3224
3225 /* Expand a call to the builtin sincos math function.
3226 Return NULL_RTX if a normal call should be emitted rather than expanding the
3227 function in-line. EXP is the expression that is a call to the builtin
3228 function. */
3229
3230 static rtx
3231 expand_builtin_sincos (tree exp)
3232 {
3233 rtx op0, op1, op2, target1, target2;
3234 machine_mode mode;
3235 tree arg, sinp, cosp;
3236 int result;
3237 location_t loc = EXPR_LOCATION (exp);
3238 tree alias_type, alias_off;
3239
3240 if (!validate_arglist (exp, REAL_TYPE,
3241 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3243
3244 arg = CALL_EXPR_ARG (exp, 0);
3245 sinp = CALL_EXPR_ARG (exp, 1);
3246 cosp = CALL_EXPR_ARG (exp, 2);
3247
3248 /* Make a suitable register to place result in. */
3249 mode = TYPE_MODE (TREE_TYPE (arg));
3250
3251 /* Check if sincos insn is available, otherwise emit the call. */
3252 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3253 return NULL_RTX;
3254
3255 target1 = gen_reg_rtx (mode);
3256 target2 = gen_reg_rtx (mode);
3257
3258 op0 = expand_normal (arg);
3259 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3260 alias_off = build_int_cst (alias_type, 0);
3261 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3262 sinp, alias_off));
3263 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3264 cosp, alias_off));
3265
3266 /* Compute into target1 and target2.
3267 Set TARGET to wherever the result comes back. */
3268 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3269 gcc_assert (result);
3270
3271 /* Move target1 and target2 to the memory locations indicated
3272 by op1 and op2. */
3273 emit_move_insn (op1, target1);
3274 emit_move_insn (op2, target2);
3275
3276 return const0_rtx;
3277 }
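/* For illustration, the call form handled above (sketch).  sincos is a GNU
   libc extension (declared in <math.h> when _GNU_SOURCE is defined); when
   the sincos optab is not available the call is simply emitted as is.

     double s, c;
     sincos (x, &s, &c);   // may expand to a single sincos insn
*/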
3278
3279 /* Expand a call to the internal cexpi builtin to the sincos math function.
3280 EXP is the expression that is a call to the builtin function; if convenient,
3281 the result should be placed in TARGET. */
3282
3283 static rtx
3284 expand_builtin_cexpi (tree exp, rtx target)
3285 {
3286 tree fndecl = get_callee_fndecl (exp);
3287 tree arg, type;
3288 machine_mode mode;
3289 rtx op0, op1, op2;
3290 location_t loc = EXPR_LOCATION (exp);
3291
3292 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3293 return NULL_RTX;
3294
3295 arg = CALL_EXPR_ARG (exp, 0);
3296 type = TREE_TYPE (arg);
3297 mode = TYPE_MODE (TREE_TYPE (arg));
3298
3299 /* Try expanding via a sincos optab, fall back to emitting a libcall
3300 to sincos or cexp. We know one of those is available because cexpi
3301 is only generated when sincos or cexp is. */
3302 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3303 {
3304 op1 = gen_reg_rtx (mode);
3305 op2 = gen_reg_rtx (mode);
3306
3307 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3308
3309 /* Compute into op1 and op2. */
3310 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3311 }
3312 else if (targetm.libc_has_function (function_sincos, type))
3313 {
3314 tree call, fn = NULL_TREE;
3315 tree top1, top2;
3316 rtx op1a, op2a;
3317
3318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3319 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3320 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3321 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3322 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3323 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3324 else
3325 gcc_unreachable ();
3326
3327 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3328 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3329 op1a = copy_addr_to_reg (XEXP (op1, 0));
3330 op2a = copy_addr_to_reg (XEXP (op2, 0));
3331 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3332 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3333
3334 /* Make sure not to fold the sincos call again. */
3335 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3336 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3337 call, 3, arg, top1, top2));
3338 }
3339 else
3340 {
3341 tree call, fn = NULL_TREE, narg;
3342 tree ctype = build_complex_type (type);
3343
3344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3345 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3346 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3347 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3348 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3349 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3350 else
3351 gcc_unreachable ();
3352
3353 /* If we don't have a decl for cexp create one. This is the
3354 friendliest fallback if the user calls __builtin_cexpi
3355 without full target C99 function support. */
3356 if (fn == NULL_TREE)
3357 {
3358 tree fntype;
3359 const char *name = NULL;
3360
3361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3362 name = "cexpf";
3363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3364 name = "cexp";
3365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3366 name = "cexpl";
3367
3368 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3369 fn = build_fn_decl (name, fntype);
3370 }
3371
3372 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3373 build_real (type, dconst0), arg);
3374
3375 /* Make sure not to fold the cexp call again. */
3376 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3377 return expand_expr (build_call_nary (ctype, call, 1, narg),
3378 target, VOIDmode, EXPAND_NORMAL);
3379 }
3380
3381 /* Now build the proper return type. */
3382 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3383 make_tree (TREE_TYPE (arg), op2),
3384 make_tree (TREE_TYPE (arg), op1)),
3385 target, VOIDmode, EXPAND_NORMAL);
3386 }
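/* An illustrative sketch of the lowering performed above (assuming
   a double argument; exact names depend on the type and target):

     _Complex double r = __builtin_cexpi (x);

   is expanded via the sincos optab or, failing that, roughly as

     double s, c;
     sincos (x, &s, &c);
     r = c + s * 1.0i;

   and on targets without sincos as cexp (0.0 + x * 1.0i).  */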
3387
3388 /* Conveniently construct a function call expression. FNDECL names the
3389 function to be called, N is the number of arguments, and the "..."
3390 parameters are the argument expressions. Unlike build_call_expr
3391 this doesn't fold the call, so it always returns a CALL_EXPR. */
3392
3393 static tree
3394 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3395 {
3396 va_list ap;
3397 tree fntype = TREE_TYPE (fndecl);
3398 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3399
3400 va_start (ap, n);
3401 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3402 va_end (ap);
3403 SET_EXPR_LOCATION (fn, loc);
3404 return fn;
3405 }
3406
3407 /* Expand a call to one of the builtin rounding functions gcc defines
3408 as an extension (lfloor and lceil). As these are gcc extensions we
3409 do not need to worry about setting errno to EDOM.
3410 If expanding via optab fails, lower expression to (int)(floor(x)).
3411 EXP is the expression that is a call to the builtin function;
3412 if convenient, the result should be placed in TARGET. */
3413
3414 static rtx
3415 expand_builtin_int_roundingfn (tree exp, rtx target)
3416 {
3417 convert_optab builtin_optab;
3418 rtx op0, tmp;
3419 rtx_insn *insns;
3420 tree fndecl = get_callee_fndecl (exp);
3421 enum built_in_function fallback_fn;
3422 tree fallback_fndecl;
3423 machine_mode mode;
3424 tree arg;
3425
3426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3427 return NULL_RTX;
3428
3429 arg = CALL_EXPR_ARG (exp, 0);
3430
3431 switch (DECL_FUNCTION_CODE (fndecl))
3432 {
3433 CASE_FLT_FN (BUILT_IN_ICEIL):
3434 CASE_FLT_FN (BUILT_IN_LCEIL):
3435 CASE_FLT_FN (BUILT_IN_LLCEIL):
3436 builtin_optab = lceil_optab;
3437 fallback_fn = BUILT_IN_CEIL;
3438 break;
3439
3440 CASE_FLT_FN (BUILT_IN_IFLOOR):
3441 CASE_FLT_FN (BUILT_IN_LFLOOR):
3442 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3443 builtin_optab = lfloor_optab;
3444 fallback_fn = BUILT_IN_FLOOR;
3445 break;
3446
3447 default:
3448 gcc_unreachable ();
3449 }
3450
3451 /* Make a suitable register to place result in. */
3452 mode = TYPE_MODE (TREE_TYPE (exp));
3453
3454 target = gen_reg_rtx (mode);
3455
3456 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3457 need to expand the argument again. This way, we will not perform
3458 side-effects more than once. */
3459 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3460
3461 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3462
3463 start_sequence ();
3464
3465 /* Compute into TARGET. */
3466 if (expand_sfix_optab (target, op0, builtin_optab))
3467 {
3468 /* Output the entire sequence. */
3469 insns = get_insns ();
3470 end_sequence ();
3471 emit_insn (insns);
3472 return target;
3473 }
3474
3475 /* If we were unable to expand via the builtin, stop the sequence
3476 (without outputting the insns). */
3477 end_sequence ();
3478
3479 /* Fall back to floating point rounding optab. */
3480 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3481
3482 /* For non-C99 targets we may end up without a fallback fndecl here
3483 if the user called __builtin_lfloor directly. In this case emit
3484 a call to the floor/ceil variants nevertheless. This should result
3485 in the best user experience for targets without full C99 support. */
3486 if (fallback_fndecl == NULL_TREE)
3487 {
3488 tree fntype;
3489 const char *name = NULL;
3490
3491 switch (DECL_FUNCTION_CODE (fndecl))
3492 {
3493 case BUILT_IN_ICEIL:
3494 case BUILT_IN_LCEIL:
3495 case BUILT_IN_LLCEIL:
3496 name = "ceil";
3497 break;
3498 case BUILT_IN_ICEILF:
3499 case BUILT_IN_LCEILF:
3500 case BUILT_IN_LLCEILF:
3501 name = "ceilf";
3502 break;
3503 case BUILT_IN_ICEILL:
3504 case BUILT_IN_LCEILL:
3505 case BUILT_IN_LLCEILL:
3506 name = "ceill";
3507 break;
3508 case BUILT_IN_IFLOOR:
3509 case BUILT_IN_LFLOOR:
3510 case BUILT_IN_LLFLOOR:
3511 name = "floor";
3512 break;
3513 case BUILT_IN_IFLOORF:
3514 case BUILT_IN_LFLOORF:
3515 case BUILT_IN_LLFLOORF:
3516 name = "floorf";
3517 break;
3518 case BUILT_IN_IFLOORL:
3519 case BUILT_IN_LFLOORL:
3520 case BUILT_IN_LLFLOORL:
3521 name = "floorl";
3522 break;
3523 default:
3524 gcc_unreachable ();
3525 }
3526
3527 fntype = build_function_type_list (TREE_TYPE (arg),
3528 TREE_TYPE (arg), NULL_TREE);
3529 fallback_fndecl = build_fn_decl (name, fntype);
3530 }
3531
3532 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3533
3534 tmp = expand_normal (exp);
3535 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3536
3537 /* Truncate the result of floating point optab to integer
3538 via expand_fix (). */
3539 target = gen_reg_rtx (mode);
3540 expand_fix (target, tmp, 0);
3541
3542 return target;
3543 }
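/* An illustrative sketch of the fallback path above (assuming the
   target provides no lfloor/lceil optab pattern):

     long l = __builtin_lfloor (x);

   is expanded roughly as

     long l = (long) floor (x);

   with floor replaced by floorf/floorl (or a locally built decl)
   as the argument type requires, and the final truncation done by
   expand_fix.  */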
3544
3545 /* Expand a call to one of the builtin math functions doing integer
3546 conversion (lrint, lround and their variants).
3547 Return 0 if a normal call should be emitted rather than expanding the
3548 function in-line. EXP is the expression that is a call to the builtin
3549 function; if convenient, the result should be placed in TARGET. */
3550
3551 static rtx
3552 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3553 {
3554 convert_optab builtin_optab;
3555 rtx op0;
3556 rtx_insn *insns;
3557 tree fndecl = get_callee_fndecl (exp);
3558 tree arg;
3559 machine_mode mode;
3560 enum built_in_function fallback_fn = BUILT_IN_NONE;
3561
3562 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3563 return NULL_RTX;
3564
3565 arg = CALL_EXPR_ARG (exp, 0);
3566
3567 switch (DECL_FUNCTION_CODE (fndecl))
3568 {
3569 CASE_FLT_FN (BUILT_IN_IRINT):
3570 fallback_fn = BUILT_IN_LRINT;
3571 gcc_fallthrough ();
3572 CASE_FLT_FN (BUILT_IN_LRINT):
3573 CASE_FLT_FN (BUILT_IN_LLRINT):
3574 builtin_optab = lrint_optab;
3575 break;
3576
3577 CASE_FLT_FN (BUILT_IN_IROUND):
3578 fallback_fn = BUILT_IN_LROUND;
3579 gcc_fallthrough ();
3580 CASE_FLT_FN (BUILT_IN_LROUND):
3581 CASE_FLT_FN (BUILT_IN_LLROUND):
3582 builtin_optab = lround_optab;
3583 break;
3584
3585 default:
3586 gcc_unreachable ();
3587 }
3588
3589 /* There's no easy way to detect the case we need to set EDOM. */
3590 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3591 return NULL_RTX;
3592
3593 /* Make a suitable register to place result in. */
3594 mode = TYPE_MODE (TREE_TYPE (exp));
3595
3596 /* There's no easy way to detect the case we need to set EDOM. */
3597 if (!flag_errno_math)
3598 {
3599 rtx result = gen_reg_rtx (mode);
3600
3601 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3602 need to expand the argument again. This way, we will not perform
3603 side-effects more than once. */
3604 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3605
3606 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3607
3608 start_sequence ();
3609
3610 if (expand_sfix_optab (result, op0, builtin_optab))
3611 {
3612 /* Output the entire sequence. */
3613 insns = get_insns ();
3614 end_sequence ();
3615 emit_insn (insns);
3616 return result;
3617 }
3618
3619 /* If we were unable to expand via the builtin, stop the sequence
3620 (without outputting the insns) and call to the library function
3621 with the stabilized argument list. */
3622 end_sequence ();
3623 }
3624
3625 if (fallback_fn != BUILT_IN_NONE)
3626 {
3627 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3628 targets, (int) round (x) should never be transformed into
3629 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3630 a call to lround in the hope that the target provides at least some
3631 C99 functions. This should result in the best user experience for
3632 targets without full C99 support. */
3633 tree fallback_fndecl = mathfn_built_in_1
3634 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3635
3636 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3637 fallback_fndecl, 1, arg);
3638
3639 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3640 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3641 return convert_to_mode (mode, target, 0);
3642 }
3643
3644 return expand_call (exp, target, target == const0_rtx);
3645 }
3646
3647 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3648 a normal call should be emitted rather than expanding the function
3649 in-line. EXP is the expression that is a call to the builtin
3650 function; if convenient, the result should be placed in TARGET. */
3651
3652 static rtx
3653 expand_builtin_powi (tree exp, rtx target)
3654 {
3655 tree arg0, arg1;
3656 rtx op0, op1;
3657 machine_mode mode;
3658 machine_mode mode2;
3659
3660 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3661 return NULL_RTX;
3662
3663 arg0 = CALL_EXPR_ARG (exp, 0);
3664 arg1 = CALL_EXPR_ARG (exp, 1);
3665 mode = TYPE_MODE (TREE_TYPE (exp));
3666
3667 /* Emit a libcall to libgcc. */
3668
3669 /* Mode of the 2nd argument must match that of an int. */
3670 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3671
3672 if (target == NULL_RTX)
3673 target = gen_reg_rtx (mode);
3674
3675 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3676 if (GET_MODE (op0) != mode)
3677 op0 = convert_to_mode (mode, op0, 0);
3678 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3679 if (GET_MODE (op1) != mode2)
3680 op1 = convert_to_mode (mode2, op1, 0);
3681
3682 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3683 target, LCT_CONST, mode,
3684 op0, mode, op1, mode2);
3685
3686 return target;
3687 }
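/* An illustrative sketch of the libcall emitted above (the libfunc
   name depends on the mode; __powidf2 is the usual libgcc name for
   double):

     double y = __builtin_powi (x, n);

   becomes the equivalent of

     double y = __powidf2 (x, (int) n);  */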
3688
3689 /* Expand expression EXP which is a call to the strlen builtin. Return
3690 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3691 try to get the result in TARGET, if convenient. */
3692
3693 static rtx
3694 expand_builtin_strlen (tree exp, rtx target,
3695 machine_mode target_mode)
3696 {
3697 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3698 return NULL_RTX;
3699
3700 tree src = CALL_EXPR_ARG (exp, 0);
3701 if (!check_read_access (exp, src))
3702 return NULL_RTX;
3703
3704 /* If the length can be computed at compile-time, return it. */
3705 if (tree len = c_strlen (src, 0))
3706 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3707
3708 /* If the length can be computed at compile-time and is a constant
3709 integer, but there are side-effects in src, evaluate
3710 src for side-effects, then return len.
3711 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3712 can be optimized into: i++; x = 3; */
3713 tree len = c_strlen (src, 1);
3714 if (len && TREE_CODE (len) == INTEGER_CST)
3715 {
3716 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3717 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3718 }
3719
3720 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3721
3722 /* If SRC is not a pointer type, don't do this operation inline. */
3723 if (align == 0)
3724 return NULL_RTX;
3725
3726 /* Bail out if we can't compute strlen in the right mode. */
3727 machine_mode insn_mode;
3728 enum insn_code icode = CODE_FOR_nothing;
3729 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3730 {
3731 icode = optab_handler (strlen_optab, insn_mode);
3732 if (icode != CODE_FOR_nothing)
3733 break;
3734 }
3735 if (insn_mode == VOIDmode)
3736 return NULL_RTX;
3737
3738 /* Make a place to hold the source address. We will not expand
3739 the actual source until we are sure that the expansion will
3740 not fail -- there are trees that cannot be expanded twice. */
3741 rtx src_reg = gen_reg_rtx (Pmode);
3742
3743 /* Mark the beginning of the strlen sequence so we can emit the
3744 source operand later. */
3745 rtx_insn *before_strlen = get_last_insn ();
3746
3747 class expand_operand ops[4];
3748 create_output_operand (&ops[0], target, insn_mode);
3749 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3750 create_integer_operand (&ops[2], 0);
3751 create_integer_operand (&ops[3], align);
3752 if (!maybe_expand_insn (icode, 4, ops))
3753 return NULL_RTX;
3754
3755 /* Check to see if the argument was declared attribute nonstring
3756 and if so, issue a warning since at this point it's not known
3757 to be nul-terminated. */
3758 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3759
3760 /* Now that we are assured of success, expand the source. */
3761 start_sequence ();
3762 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3763 if (pat != src_reg)
3764 {
3765 #ifdef POINTERS_EXTEND_UNSIGNED
3766 if (GET_MODE (pat) != Pmode)
3767 pat = convert_to_mode (Pmode, pat,
3768 POINTERS_EXTEND_UNSIGNED);
3769 #endif
3770 emit_move_insn (src_reg, pat);
3771 }
3772 pat = get_insns ();
3773 end_sequence ();
3774
3775 if (before_strlen)
3776 emit_insn_after (pat, before_strlen);
3777 else
3778 emit_insn_before (pat, get_insns ());
3779
3780 /* Return the value in the proper mode for this function. */
3781 if (GET_MODE (ops[0].value) == target_mode)
3782 target = ops[0].value;
3783 else if (target != 0)
3784 convert_move (target, ops[0].value, 0);
3785 else
3786 target = convert_to_mode (target_mode, ops[0].value, 0);
3787
3788 return target;
3789 }
3790
3791 /* Expand call EXP to the strnlen built-in, returning the result
3792 and setting it in TARGET, or NULL_RTX on failure. */
3793
3794 static rtx
3795 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3796 {
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3799
3800 tree src = CALL_EXPR_ARG (exp, 0);
3801 tree bound = CALL_EXPR_ARG (exp, 1);
3802
3803 if (!bound)
3804 return NULL_RTX;
3805
3806 check_read_access (exp, src, bound);
3807
3808 location_t loc = UNKNOWN_LOCATION;
3809 if (EXPR_HAS_LOCATION (exp))
3810 loc = EXPR_LOCATION (exp);
3811
3812 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3813 so these conversions aren't necessary. */
3814 c_strlen_data lendata = { };
3815 tree len = c_strlen (src, 0, &lendata, 1);
3816 if (len)
3817 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3818
3819 if (TREE_CODE (bound) == INTEGER_CST)
3820 {
3821 if (!len)
3822 return NULL_RTX;
3823
3824 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3825 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3826 }
3827
3828 if (TREE_CODE (bound) != SSA_NAME)
3829 return NULL_RTX;
3830
3831 wide_int min, max;
3832 enum value_range_kind rng = get_range_info (bound, &min, &max);
3833 if (rng != VR_RANGE)
3834 return NULL_RTX;
3835
3836 if (!len || TREE_CODE (len) != INTEGER_CST)
3837 {
3838 bool exact;
3839 lendata.decl = unterminated_array (src, &len, &exact);
3840 if (!lendata.decl)
3841 return NULL_RTX;
3842 }
3843
3844 if (lendata.decl)
3845 return NULL_RTX;
3846
3847 if (wi::gtu_p (min, wi::to_wide (len)))
3848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3849
3850 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3851 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3852 }
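/* An illustrative sketch of the cases handled above (assuming the
   string length is known):

     strnlen ("hello", 3)    expands to MIN (5, 3), i.e. 3
     strnlen ("hello", n)    with n known to be in [8, 16] expands
                             to 5, since the bound's minimum already
                             exceeds the length.  */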
3853
3854 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3855 bits worth of bytes starting at DATA + OFFSET and return them
3856 reinterpreted as a target constant. */
3857
3858 static rtx
3859 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3860 scalar_int_mode mode)
3861 {
3862 /* The REPresentation pointed to by DATA need not be a nul-terminated
3863 string but the caller guarantees it's large enough for MODE. */
3864 const char *rep = (const char *) data;
3865
3866 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3867 }
3868
3869 /* LEN specifies the length of the block for a memcpy/memset operation.
3870 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3871 In some cases we can make a very likely guess about the max size,
3872 in which case we store it in PROBABLE_MAX_SIZE. */
3873
3874 static void
3875 determine_block_size (tree len, rtx len_rtx,
3876 unsigned HOST_WIDE_INT *min_size,
3877 unsigned HOST_WIDE_INT *max_size,
3878 unsigned HOST_WIDE_INT *probable_max_size)
3879 {
3880 if (CONST_INT_P (len_rtx))
3881 {
3882 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3883 return;
3884 }
3885 else
3886 {
3887 wide_int min, max;
3888 enum value_range_kind range_type = VR_UNDEFINED;
3889
3890 /* Determine bounds from the type. */
3891 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3892 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3893 else
3894 *min_size = 0;
3895 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3896 *probable_max_size = *max_size
3897 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3898 else
3899 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3900
3901 if (TREE_CODE (len) == SSA_NAME)
3902 range_type = get_range_info (len, &min, &max);
3903 if (range_type == VR_RANGE)
3904 {
3905 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3906 *min_size = min.to_uhwi ();
3907 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3908 *probable_max_size = *max_size = max.to_uhwi ();
3909 }
3910 else if (range_type == VR_ANTI_RANGE)
3911 {
3912 /* Code like
3913
3914 int n;
3915 if (n < 100)
3916 memcpy (a, b, n)
3917
3918 produces an anti-range allowing negative values of N. We can
3919 still use the information and guess that N is not negative.
3920 */
3921 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3922 *probable_max_size = min.to_uhwi () - 1;
3923 }
3924 }
3925 gcc_checking_assert (*max_size <=
3926 (unsigned HOST_WIDE_INT)
3927 GET_MODE_MASK (GET_MODE (len_rtx)));
3928 }
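/* An illustrative sketch of the ranges computed above (assuming the
   usual value range information is available):

     void f (unsigned n)
     {
       if (n < 100)
         memcpy (a, b, n);   // MIN_SIZE = 0, MAX_SIZE = 99
     }

   With a signed N only an anti-range is available, so MAX_SIZE stays
   at the mode mask while PROBABLE_MAX_SIZE is guessed to be 99.  */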
3929
3930 /* Issue a warning OPT for a bounded call EXP with a bound in RANGE
3931 accessing an object with SIZE. */
3932
3933 static bool
3934 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3935 tree bndrng[2], tree size, const access_data *pad = NULL)
3936 {
3937 if (!bndrng[0] || TREE_NO_WARNING (exp))
3938 return false;
3939
3940 tree maxobjsize = max_object_size ();
3941
3942 bool warned = false;
3943
3944 if (opt == OPT_Wstringop_overread)
3945 {
3946 bool maybe = pad && pad->src.phi ();
3947
3948 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3949 {
3950 if (bndrng[0] == bndrng[1])
3951 warned = (func
3952 ? warning_at (loc, opt,
3953 (maybe
3954 ? G_("%K%qD specified bound %E may "
3955 "exceed maximum object size %E")
3956 : G_("%K%qD specified bound %E "
3957 "exceeds maximum object size %E")),
3958 exp, func, bndrng[0], maxobjsize)
3959 : warning_at (loc, opt,
3960 (maybe
3961 ? G_("%Kspecified bound %E may "
3962 "exceed maximum object size %E")
3963 : G_("%Kspecified bound %E "
3964 "exceeds maximum object size %E")),
3965 exp, bndrng[0], maxobjsize));
3966 else
3967 warned = (func
3968 ? warning_at (loc, opt,
3969 (maybe
3970 ? G_("%K%qD specified bound [%E, %E] may "
3971 "exceed maximum object size %E")
3972 : G_("%K%qD specified bound [%E, %E] "
3973 "exceeds maximum object size %E")),
3974 exp, func,
3975 bndrng[0], bndrng[1], maxobjsize)
3976 : warning_at (loc, opt,
3977 (maybe
3978 ? G_("%Kspecified bound [%E, %E] may "
3979 "exceed maximum object size %E")
3980 : G_("%Kspecified bound [%E, %E] "
3981 "exceeds maximum object size %E")),
3982 exp, bndrng[0], bndrng[1], maxobjsize));
3983 }
3984 else if (!size || tree_int_cst_le (bndrng[0], size))
3985 return false;
3986 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3987 warned = (func
3988 ? warning_at (loc, opt,
3989 (maybe
3990 ? G_("%K%qD specified bound %E may exceed "
3991 "source size %E")
3992 : G_("%K%qD specified bound %E exceeds "
3993 "source size %E")),
3994 exp, func, bndrng[0], size)
3995 : warning_at (loc, opt,
3996 (maybe
3997 ? G_("%Kspecified bound %E may exceed "
3998 "source size %E")
3999 : G_("%Kspecified bound %E exceeds "
4000 "source size %E")),
4001 exp, bndrng[0], size));
4002 else
4003 warned = (func
4004 ? warning_at (loc, opt,
4005 (maybe
4006 ? G_("%K%qD specified bound [%E, %E] may "
4007 "exceed source size %E")
4008 : G_("%K%qD specified bound [%E, %E] exceeds "
4009 "source size %E")),
4010 exp, func, bndrng[0], bndrng[1], size)
4011 : warning_at (loc, opt,
4012 (maybe
4013 ? G_("%Kspecified bound [%E, %E] may exceed "
4014 "source size %E")
4015 : G_("%Kspecified bound [%E, %E] exceeds "
4016 "source size %E")),
4017 exp, bndrng[0], bndrng[1], size));
4018 if (warned)
4019 {
4020 if (pad && pad->src.ref)
4021 {
4022 if (DECL_P (pad->src.ref))
4023 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4024 "source object declared here");
4025 else if (EXPR_HAS_LOCATION (pad->src.ref))
4026 inform (EXPR_LOCATION (pad->src.ref),
4027 "source object allocated here");
4028 }
4029 TREE_NO_WARNING (exp) = true;
4030 }
4031
4032 return warned;
4033 }
4034
4035 bool maybe = pad && pad->dst.phi ();
4036 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4037 {
4038 if (bndrng[0] == bndrng[1])
4039 warned = (func
4040 ? warning_at (loc, opt,
4041 (maybe
4042 ? G_("%K%qD specified size %E may "
4043 "exceed maximum object size %E")
4044 : G_("%K%qD specified size %E "
4045 "exceeds maximum object size %E")),
4046 exp, func, bndrng[0], maxobjsize)
4047 : warning_at (loc, opt,
4048 (maybe
4049 ? G_("%Kspecified size %E may exceed "
4050 "maximum object size %E")
4051 : G_("%Kspecified size %E exceeds "
4052 "maximum object size %E")),
4053 exp, bndrng[0], maxobjsize));
4054 else
4055 warned = (func
4056 ? warning_at (loc, opt,
4057 (maybe
4058 ? G_("%K%qD specified size between %E and %E "
4059 "may exceed maximum object size %E")
4060 : G_("%K%qD specified size between %E and %E "
4061 "exceeds maximum object size %E")),
4062 exp, func,
4063 bndrng[0], bndrng[1], maxobjsize)
4064 : warning_at (loc, opt,
4065 (maybe
4066 ? G_("%Kspecified size between %E and %E "
4067 "may exceed maximum object size %E")
4068 : G_("%Kspecified size between %E and %E "
4069 "exceeds maximum object size %E")),
4070 exp, bndrng[0], bndrng[1], maxobjsize));
4071 }
4072 else if (!size || tree_int_cst_le (bndrng[0], size))
4073 return false;
4074 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4075 warned = (func
4076 ? warning_at (loc, OPT_Wstringop_overflow_,
4077 (maybe
4078 ? G_("%K%qD specified bound %E may exceed "
4079 "destination size %E")
4080 : G_("%K%qD specified bound %E exceeds "
4081 "destination size %E")),
4082 exp, func, bndrng[0], size)
4083 : warning_at (loc, OPT_Wstringop_overflow_,
4084 (maybe
4085 ? G_("%Kspecified bound %E may exceed "
4086 "destination size %E")
4087 : G_("%Kspecified bound %E exceeds "
4088 "destination size %E")),
4089 exp, bndrng[0], size));
4090 else
4091 warned = (func
4092 ? warning_at (loc, OPT_Wstringop_overflow_,
4093 (maybe
4094 ? G_("%K%qD specified bound [%E, %E] may exceed "
4095 "destination size %E")
4096 : G_("%K%qD specified bound [%E, %E] exceeds "
4097 "destination size %E")),
4098 exp, func, bndrng[0], bndrng[1], size)
4099 : warning_at (loc, OPT_Wstringop_overflow_,
4100 (maybe
4101 ? G_("%Kspecified bound [%E, %E] exceeds "
4102 "destination size %E")
4103 : G_("%Kspecified bound [%E, %E] exceeds "
4104 "destination size %E")),
4105 exp, bndrng[0], bndrng[1], size));
4106
4107 if (warned)
4108 {
4109 if (pad && pad->dst.ref)
4110 {
4111 if (DECL_P (pad->dst.ref))
4112 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4113 "destination object declared here");
4114 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4115 inform (EXPR_LOCATION (pad->dst.ref),
4116 "destination object allocated here");
4117 }
4118 TREE_NO_WARNING (exp) = true;
4119 }
4120
4121 return warned;
4122 }
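/* An illustrative sketch of a diagnostic issued above (the exact
   wording is given by the format strings):

     char a[4];
     ... strnlen (a, 8) ...

   is diagnosed with -Wstringop-overread as a specified bound of 8
   exceeding the source size 4, followed by a note pointing at the
   declaration of A.  */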
4123
4124 /* For an expression EXP issue an access warning controlled by option OPT
4125 with access to a region SIZE bytes in size in the RANGE of sizes.
4126 WRITE is true for a write access, READ for a read access, and neither
4127 for a call that may or may not perform an access but for which the
4128 range is expected to be valid.
4129 Returns true when a warning has been issued. */
4130
4131 static bool
4132 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4133 tree size, bool write, bool read, bool maybe)
4134 {
4135 bool warned = false;
4136
4137 if (write && read)
4138 {
4139 if (tree_int_cst_equal (range[0], range[1]))
4140 warned = (func
4141 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4142 (maybe
4143 ? G_("%K%qD may access %E byte in a region "
4144 "of size %E")
4145 : G_("%K%qD accessing %E byte in a region "
4146 "of size %E")),
4147 (maybe
4148 ? G_ ("%K%qD may access %E bytes in a region "
4149 "of size %E")
4150 : G_ ("%K%qD accessing %E bytes in a region "
4151 "of size %E")),
4152 exp, func, range[0], size)
4153 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4154 (maybe
4155 ? G_("%Kmay access %E byte in a region "
4156 "of size %E")
4157 : G_("%Kaccessing %E byte in a region "
4158 "of size %E")),
4159 (maybe
4160 ? G_("%Kmay access %E bytes in a region "
4161 "of size %E")
4162 : G_("%Kaccessing %E bytes in a region "
4163 "of size %E")),
4164 exp, range[0], size));
4165 else if (tree_int_cst_sign_bit (range[1]))
4166 {
4167 /* Avoid printing the upper bound if it's invalid. */
4168 warned = (func
4169 ? warning_at (loc, opt,
4170 (maybe
4171 ? G_("%K%qD may access %E or more bytes "
4172 "in a region of size %E")
4173 : G_("%K%qD accessing %E or more bytes "
4174 "in a region of size %E")),
4175 exp, func, range[0], size)
4176 : warning_at (loc, opt,
4177 (maybe
4178 ? G_("%Kmay access %E or more bytes "
4179 "in a region of size %E")
4180 : G_("%Kaccessing %E or more bytes "
4181 "in a region of size %E")),
4182 exp, range[0], size));
4183 }
4184 else
4185 warned = (func
4186 ? warning_at (loc, opt,
4187 (maybe
4188 ? G_("%K%qD may access between %E and %E "
4189 "bytes in a region of size %E")
4190 : G_("%K%qD accessing between %E and %E "
4191 "bytes in a region of size %E")),
4192 exp, func, range[0], range[1],
4193 size)
4194 : warning_at (loc, opt,
4195 (maybe
4196 ? G_("%Kmay access between %E and %E bytes "
4197 "in a region of size %E")
4198 : G_("%Kaccessing between %E and %E bytes "
4199 "in a region of size %E")),
4200 exp, range[0], range[1],
4201 size));
4202 return warned;
4203 }
4204
4205 if (write)
4206 {
4207 if (tree_int_cst_equal (range[0], range[1]))
4208 warned = (func
4209 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4210 (maybe
4211 ? G_("%K%qD may write %E byte into a region "
4212 "of size %E")
4213 : G_("%K%qD writing %E byte into a region "
4214 "of size %E overflows the destination")),
4215 (maybe
4216 ? G_("%K%qD may write %E bytes into a region "
4217 "of size %E")
4218 : G_("%K%qD writing %E bytes into a region "
4219 "of size %E overflows the destination")),
4220 exp, func, range[0], size)
4221 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4222 (maybe
4223 ? G_("%Kmay write %E byte into a region "
4224 "of size %E")
4225 : G_("%Kwriting %E byte into a region "
4226 "of size %E overflows the destination")),
4227 (maybe
4228 ? G_("%Kmay write %E bytes into a region "
4229 "of size %E")
4230 : G_("%Kwriting %E bytes into a region "
4231 "of size %E overflows the destination")),
4232 exp, range[0], size));
4233 else if (tree_int_cst_sign_bit (range[1]))
4234 {
4235 /* Avoid printing the upper bound if it's invalid. */
4236 warned = (func
4237 ? warning_at (loc, opt,
4238 (maybe
4239 ? G_("%K%qD may write %E or more bytes "
4240 "into a region of size %E "
4241 "the destination")
4242 : G_("%K%qD writing %E or more bytes "
4243 "into a region of size %E overflows "
4244 "the destination")),
4245 exp, func, range[0], size)
4246 : warning_at (loc, opt,
4247 (maybe
4248 ? G_("%Kmay write %E or more bytes into "
4249 "a region of size %E")
4250 : G_("%Kwriting %E or more bytes into "
4251 "a region of size %E overflows "
4252 "the destination")),
4253 exp, range[0], size));
4254 }
4255 else
4256 warned = (func
4257 ? warning_at (loc, opt,
4258 (maybe
4259 ? G_("%K%qD may write between %E and %E bytes "
4260 "into a region of size %E")
4261 : G_("%K%qD writing between %E and %E bytes "
4262 "into a region of size %E overflows "
4263 "the destination")),
4264 exp, func, range[0], range[1],
4265 size)
4266 : warning_at (loc, opt,
4267 (maybe
4268 ? G_("%Kmay write between %E and %E bytes "
4269 "into a region of size %E")
4270 : G_("%Kwriting between %E and %E bytes "
4271 "into a region of size %E overflows "
4272 "the destination")),
4273 exp, range[0], range[1],
4274 size));
4275 return warned;
4276 }
4277
4278 if (read)
4279 {
4280 if (tree_int_cst_equal (range[0], range[1]))
4281 warned = (func
4282 ? warning_n (loc, OPT_Wstringop_overread,
4283 tree_to_uhwi (range[0]),
4284 (maybe
4285 ? G_("%K%qD may reade %E byte from a region "
4286 "of size %E")
4287 : G_("%K%qD reading %E byte from a region "
4288 "of size %E")),
4289 (maybe
4290 ? G_("%K%qD may read %E bytes from a region "
4291 "of size %E")
4292 : G_("%K%qD reading %E bytes from a region "
4293 "of size %E")),
4294 exp, func, range[0], size)
4295 : warning_n (loc, OPT_Wstringop_overread,
4296 tree_to_uhwi (range[0]),
4297 (maybe
4298 ? G_("%Kmay read %E byte from a region "
4299 "of size %E")
4300 : G_("%Kreading %E byte from a region "
4301 "of size %E")),
4302 (maybe
4303 ? G_("%Kmay read %E bytes from a region "
4304 "of size %E")
4305 : G_("%Kreading %E bytes from a region "
4306 "of size %E")),
4307 exp, range[0], size));
4308 else if (tree_int_cst_sign_bit (range[1]))
4309 {
4310 /* Avoid printing the upper bound if it's invalid. */
4311 warned = (func
4312 ? warning_at (loc, OPT_Wstringop_overread,
4313 (maybe
4314 ? G_("%K%qD may read %E or more bytes "
4315 "from a region of size %E")
4316 : G_("%K%qD reading %E or more bytes "
4317 "from a region of size %E")),
4318 exp, func, range[0], size)
4319 : warning_at (loc, OPT_Wstringop_overread,
4320 (maybe
4321 ? G_("%Kmay read %E or more bytes "
4322 "from a region of size %E")
4323 : G_("%Kreading %E or more bytes "
4324 "from a region of size %E")),
4325 exp, range[0], size));
4326 }
4327 else
4328 warned = (func
4329 ? warning_at (loc, OPT_Wstringop_overread,
4330 (maybe
4331 ? G_("%K%qD may read between %E and %E bytes "
4332 "from a region of size %E")
4333 : G_("%K%qD reading between %E and %E bytes "
4334 "from a region of size %E")),
4335 exp, func, range[0], range[1], size)
4336 : warning_at (loc, OPT_Wstringop_overread,
4337 (maybe
4338 ? G_("%Kmay read between %E and %E bytes "
4339 "from a region of size %E")
4340 : G_("%Kreading between %E and %E bytes "
4341 "from a region of size %E")),
4342 exp, range[0], range[1], size));
4343
4344 if (warned)
4345 TREE_NO_WARNING (exp) = true;
4346
4347 return warned;
4348 }
4349
4350 if (tree_int_cst_equal (range[0], range[1])
4351 || tree_int_cst_sign_bit (range[1]))
4352 warned = (func
4353 ? warning_n (loc, OPT_Wstringop_overread,
4354 tree_to_uhwi (range[0]),
4355 "%K%qD epecting %E byte in a region of size %E",
4356 "%K%qD expecting %E bytes in a region of size %E",
4357 exp, func, range[0], size)
4358 : warning_n (loc, OPT_Wstringop_overread,
4359 tree_to_uhwi (range[0]),
4360 "%Kexpecting %E byte in a region of size %E",
4361 "%Kexpecting %E bytes in a region of size %E",
4362 exp, range[0], size));
4363 else if (tree_int_cst_sign_bit (range[1]))
4364 {
4365 /* Avoid printing the upper bound if it's invalid. */
4366 warned = (func
4367 ? warning_at (loc, OPT_Wstringop_overread,
4368 "%K%qD expecting %E or more bytes in a region "
4369 "of size %E",
4370 exp, func, range[0], size)
4371 : warning_at (loc, OPT_Wstringop_overread,
4372 "%Kexpecting %E or more bytes in a region "
4373 "of size %E",
4374 exp, range[0], size));
4375 }
4376 else
4377 warned = (func
4378 ? warning_at (loc, OPT_Wstringop_overread,
4379 "%K%qD expecting between %E and %E bytes in "
4380 "a region of size %E",
4381 exp, func, range[0], range[1], size)
4382 : warning_at (loc, OPT_Wstringop_overread,
4383 "%Kexpectting between %E and %E bytes in "
4384 "a region of size %E",
4385 exp, range[0], range[1], size));
4386
4387 if (warned)
4388 TREE_NO_WARNING (exp) = true;
4389
4390 return warned;
4391 }
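/* An illustrative sketch of the write case handled above:

     char d[4];
     memset (d, 0, 8);

   is diagnosed with -Wstringop-overflow as writing 8 bytes into
   a region of size 4.  */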
4392
4393 /* Issue one inform message describing each target of an access REF.
4394 MODE indicates whether the access is a read, a write, or both. */
4395
4396 void
4397 access_ref::inform_access (access_mode mode) const
4398 {
4399 const access_ref &aref = *this;
4400 if (!aref.ref)
4401 return;
4402
4403 if (aref.phi ())
4404 {
4405 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4406 with data for all objects referenced by the PHI arguments. */
4407 access_ref maxref;
4408 auto_vec<access_ref> all_refs;
4409 if (!get_ref (&all_refs, &maxref))
4410 return;
4411
4412 /* Except for MAXREF, the rest of the arguments' offsets need not
4413 reflect one added to the PHI itself. Determine the latter from
4414 MAXREF on which the result is based. */
4415 const offset_int orng[] =
4416 {
4417 offrng[0] - maxref.offrng[0],
4418 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4419 };
4420
4421 /* Add the final PHI's offset to that of each of the arguments
4422 and recurse to issue an inform message for it. */
4423 for (unsigned i = 0; i != all_refs.length (); ++i)
4424 {
4425 /* Skip any PHIs; those could lead to infinite recursion. */
4426 if (all_refs[i].phi ())
4427 continue;
4428
4429 all_refs[i].add_offset (orng[0], orng[1]);
4430 all_refs[i].inform_access (mode);
4431 }
4432 return;
4433 }
4434
4435 /* Convert offset range and avoid including a zero range since it
4436 isn't necessarily meaningful. */
4437 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4438 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4439 HOST_WIDE_INT minoff;
4440 HOST_WIDE_INT maxoff = diff_max;
4441 if (wi::fits_shwi_p (aref.offrng[0]))
4442 minoff = aref.offrng[0].to_shwi ();
4443 else
4444 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4445
4446 if (wi::fits_shwi_p (aref.offrng[1]))
4447 maxoff = aref.offrng[1].to_shwi ();
4448
4449 if (maxoff <= diff_min || maxoff >= diff_max)
4450 /* Avoid mentioning an upper bound that's equal to or in excess
4451 of the maximum of ptrdiff_t. */
4452 maxoff = minoff;
4453
4454 /* Convert size range and always include it since all sizes are
4455 meaningful. */
4456 unsigned long long minsize = 0, maxsize = 0;
4457 if (wi::fits_shwi_p (aref.sizrng[0])
4458 && wi::fits_shwi_p (aref.sizrng[1]))
4459 {
4460 minsize = aref.sizrng[0].to_shwi ();
4461 maxsize = aref.sizrng[1].to_shwi ();
4462 }
4463
4464 /* SIZRNG doesn't necessarily have the same range as the allocation
4465 size determined by gimple_call_alloc_size (). */
4466 char sizestr[80];
4467 if (minsize == maxsize)
4468 sprintf (sizestr, "%llu", minsize);
4469 else
4470 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4471
4472 char offstr[80];
4473 if (minoff == 0
4474 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4475 offstr[0] = '\0';
4476 else if (minoff == maxoff)
4477 sprintf (offstr, "%lli", (long long) minoff);
4478 else
4479 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4480
4481 location_t loc = UNKNOWN_LOCATION;
4482
4483 tree ref = this->ref;
4484 tree allocfn = NULL_TREE;
4485 if (TREE_CODE (ref) == SSA_NAME)
4486 {
4487 gimple *stmt = SSA_NAME_DEF_STMT (ref);
4488 if (is_gimple_call (stmt))
4489 {
4490 loc = gimple_location (stmt);
4491 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4492 {
4493 /* Strip the SSA_NAME suffix from the variable name and
4494 recreate an identifier with the VLA's original name. */
4495 ref = gimple_call_lhs (stmt);
4496 if (SSA_NAME_IDENTIFIER (ref))
4497 {
4498 ref = SSA_NAME_IDENTIFIER (ref);
4499 const char *id = IDENTIFIER_POINTER (ref);
4500 size_t len = strcspn (id, ".$");
4501 if (!len)
4502 len = strlen (id);
4503 ref = get_identifier_with_length (id, len);
4504 }
4505 }
4506 else
4507 {
4508 /* Except for VLAs, retrieve the allocation function. */
4509 allocfn = gimple_call_fndecl (stmt);
4510 if (!allocfn)
4511 allocfn = gimple_call_fn (stmt);
4512 if (TREE_CODE (allocfn) == SSA_NAME)
4513 {
4514 /* For an ALLOC_CALL via a function pointer make a small
4515 effort to determine the destination of the pointer. */
4516 gimple *def = SSA_NAME_DEF_STMT (allocfn);
4517 if (gimple_assign_single_p (def))
4518 {
4519 tree rhs = gimple_assign_rhs1 (def);
4520 if (DECL_P (rhs))
4521 allocfn = rhs;
4522 else if (TREE_CODE (rhs) == COMPONENT_REF)
4523 allocfn = TREE_OPERAND (rhs, 1);
4524 }
4525 }
4526 }
4527 }
4528 else if (gimple_nop_p (stmt))
4529 /* Handle PARM_DECL below. */
4530 ref = SSA_NAME_VAR (ref);
4531 }
4532
4533 if (DECL_P (ref))
4534 loc = DECL_SOURCE_LOCATION (ref);
4535 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4536 loc = EXPR_LOCATION (ref);
4537 else if (TREE_CODE (ref) != IDENTIFIER_NODE
4538 && TREE_CODE (ref) != SSA_NAME)
4539 return;
4540
4541 if (mode == access_read_write || mode == access_write_only)
4542 {
4543 if (allocfn == NULL_TREE)
4544 {
4545 if (*offstr)
4546 inform (loc, "at offset %s into destination object %qE of size %s",
4547 offstr, ref, sizestr);
4548 else
4549 inform (loc, "destination object %qE of size %s", ref, sizestr);
4550 return;
4551 }
4552
4553 if (*offstr)
4554 inform (loc,
4555 "at offset %s into destination object of size %s "
4556 "allocated by %qE", offstr, sizestr, allocfn);
4557 else
4558 inform (loc, "destination object of size %s allocated by %qE",
4559 sizestr, allocfn);
4560 return;
4561 }
4562
4563 if (allocfn == NULL_TREE)
4564 {
4565 if (*offstr)
4566 inform (loc, "at offset %s into source object %qE of size %s",
4567 offstr, ref, sizestr);
4568 else
4569 inform (loc, "source object %qE of size %s", ref, sizestr);
4570
4571 return;
4572 }
4573
4574 if (*offstr)
4575 inform (loc,
4576 "at offset %s into source object of size %s allocated by %qE",
4577 offstr, sizestr, allocfn);
4578 else
4579 inform (loc, "source object of size %s allocated by %qE",
4580 sizestr, allocfn);
4581 }
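/* An illustrative sketch of the notes emitted above to follow up
   one of the access warnings (wording abridged):

     char d[4];              // note: destination object 'd' of size 4
     p = malloc (8);         // note: destination object of size 8
                             //       allocated by 'malloc'

   When only a range of sizes or a nonzero offset into the object is
   known, the note prints it as [min, max].  */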
4582
4583 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4584 by BNDRNG if nonnull and valid. */
4585
4586 static void
4587 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4588 {
4589 if (bound)
4590 get_size_range (bound, range);
4591
4592 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4593 return;
4594
4595 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4596 {
4597 offset_int r[] =
4598 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4599 if (r[0] < bndrng[0])
4600 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4601 if (bndrng[1] < r[1])
4602 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4603 }
4604 else
4605 {
4606 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4607 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4608 }
4609 }
4610
4611 /* Try to verify that the sizes and lengths of the arguments to a string
4612 manipulation function given by EXP are within valid bounds and that
4613 the operation does not lead to buffer overflow or read past the end.
4614 Arguments other than EXP may be null. When non-null, the arguments
4615 have the following meaning:
4616 DST is the destination of a copy call or NULL otherwise.
4617 SRC is the source of a copy call or NULL otherwise.
4618 DSTWRITE is the number of bytes written into the destination obtained
4619 from the user-supplied size argument to the function (such as in
4620 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
4621 MAXREAD is the user-supplied bound on the length of the source sequence
4622 (such as in strncat (D, S, N)). It specifies the upper limit on the number
4623 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4624 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4625 expression EXP is a string function call (as opposed to a memory call
4626 like memcpy). As an exception, SRCSTR can also be an integer denoting
4627 the precomputed size of the source string or object (for functions like
4628 memcpy).
4629 DSTSIZE is the size of the destination object.
4630
4631 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
4632 SIZE_MAX.
4633
4634 MODE indicates whether the access is a read, a write, or both, or
4635 neither for simple size checks in calls to functions that neither read
4636 from nor write to the region.
4637
4638 When nonnull, PAD points to a more detailed description of the access.
4639
4640 If the call is successfully verified as safe return true, otherwise
4641 return false. */
4642
4643 bool
4644 check_access (tree exp, tree dstwrite,
4645 tree maxread, tree srcstr, tree dstsize,
4646 access_mode mode, const access_data *pad /* = NULL */)
4647 {
4648 /* The size of the largest object is half the address space, or
4649 PTRDIFF_MAX. (This is way too permissive.) */
4650 tree maxobjsize = max_object_size ();
4651
4652 /* Either an approximate/minimum length of the source string for
4653 string functions or the size of the source object for raw memory
4654 functions. */
4655 tree slen = NULL_TREE;
4656
4657 /* The range of the access in bytes; first set to the write access
4658 for functions that write and then read for those that also (or
4659 just) read. */
4660 tree range[2] = { NULL_TREE, NULL_TREE };
4661
4662 /* Set to true when the exact number of bytes written by a string
4663 function like strcpy is not known and the only thing that is
4664 known is that it must be at least one (for the terminating nul). */
4665 bool at_least_one = false;
4666 if (srcstr)
4667 {
4668 /* SRCSTR is normally a pointer to string but as a special case
4669 it can be an integer denoting the length of a string. */
4670 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4671 {
4672 if (!check_nul_terminated_array (exp, srcstr, maxread))
4673 return false;
4674 /* Try to determine the range of lengths the source string
4675 refers to. If it can be determined and is less than
4676 the upper bound given by MAXREAD add one to it for
4677 the terminating nul. Otherwise, set it to one for
4678 the same reason, or to MAXREAD as appropriate. */
4679 c_strlen_data lendata = { };
4680 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4681 range[0] = lendata.minlen;
4682 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4683 if (range[0]
4684 && TREE_CODE (range[0]) == INTEGER_CST
4685 && TREE_CODE (range[1]) == INTEGER_CST
4686 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4687 {
4688 if (maxread && tree_int_cst_le (maxread, range[0]))
4689 range[0] = range[1] = maxread;
4690 else
4691 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4692 range[0], size_one_node);
4693
4694 if (maxread && tree_int_cst_le (maxread, range[1]))
4695 range[1] = maxread;
4696 else if (!integer_all_onesp (range[1]))
4697 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4698 range[1], size_one_node);
4699
4700 slen = range[0];
4701 }
4702 else
4703 {
4704 at_least_one = true;
4705 slen = size_one_node;
4706 }
4707 }
4708 else
4709 slen = srcstr;
4710 }
4711
4712 if (!dstwrite && !maxread)
4713 {
4714 /* When the only available piece of data is the object size
4715 there is nothing to do. */
4716 if (!slen)
4717 return true;
4718
4719 /* Otherwise, when the length of the source sequence is known
4720 (as with strlen), set DSTWRITE to it. */
4721 if (!range[0])
4722 dstwrite = slen;
4723 }
4724
4725 if (!dstsize)
4726 dstsize = maxobjsize;
4727
4728 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4729 if valid. */
4730 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4731
4732 tree func = get_callee_fndecl (exp);
4733 /* Read vs write access by built-ins can be determined from the const
4734 qualifiers on the pointer argument. In the absence of attribute
4735 access, non-const qualified pointer arguments to user-defined
4736 functions are assumed to both read and write the objects. */
4737 const bool builtin = func ? fndecl_built_in_p (func) : false;
4738
4739 /* First check the number of bytes to be written against the maximum
4740 object size. */
4741 if (range[0]
4742 && TREE_CODE (range[0]) == INTEGER_CST
4743 && tree_int_cst_lt (maxobjsize, range[0]))
4744 {
4745 location_t loc = tree_inlined_location (exp);
4746 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4747 NULL_TREE, pad);
4748 return false;
4749 }
4750
4751 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4752 constant, and in range of unsigned HOST_WIDE_INT. */
4753 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4754
4755 /* Next check the number of bytes to be written against the destination
4756 object size. */
4757 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4758 {
4759 if (range[0]
4760 && TREE_CODE (range[0]) == INTEGER_CST
4761 && ((tree_fits_uhwi_p (dstsize)
4762 && tree_int_cst_lt (dstsize, range[0]))
4763 || (dstwrite
4764 && tree_fits_uhwi_p (dstwrite)
4765 && tree_int_cst_lt (dstwrite, range[0]))))
4766 {
4767 if (TREE_NO_WARNING (exp)
4768 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4769 return false;
4770
4771 location_t loc = tree_inlined_location (exp);
4772 bool warned = false;
4773 if (dstwrite == slen && at_least_one)
4774 {
4775 /* This is a call to strcpy with a destination of 0 size
4776 and a source of unknown length. The call will write
4777 at least one byte past the end of the destination. */
4778 warned = (func
4779 ? warning_at (loc, OPT_Wstringop_overflow_,
4780 "%K%qD writing %E or more bytes into "
4781 "a region of size %E overflows "
4782 "the destination",
4783 exp, func, range[0], dstsize)
4784 : warning_at (loc, OPT_Wstringop_overflow_,
4785 "%Kwriting %E or more bytes into "
4786 "a region of size %E overflows "
4787 "the destination",
4788 exp, range[0], dstsize));
4789 }
4790 else
4791 {
4792 const bool read
4793 = mode == access_read_only || mode == access_read_write;
4794 const bool write
4795 = mode == access_write_only || mode == access_read_write;
4796 const bool maybe = pad && pad->dst.parmarray;
4797 warned = warn_for_access (loc, func, exp,
4798 OPT_Wstringop_overflow_,
4799 range, dstsize,
4800 write, read && !builtin, maybe);
4801 }
4802
4803 if (warned)
4804 {
4805 TREE_NO_WARNING (exp) = true;
4806 if (pad)
4807 pad->dst.inform_access (pad->mode);
4808 }
4809
4810 /* Return error when an overflow has been detected. */
4811 return false;
4812 }
4813 }
4814
4815 /* Check the maximum length of the source sequence against the size
4816 of the destination object if known, or against the maximum size
4817 of an object. */
4818 if (maxread)
4819 {
4820 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4821 PAD is nonnull and BNDRNG is valid. */
4822 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4823
4824 location_t loc = tree_inlined_location (exp);
4825 tree size = dstsize;
4826 if (pad && pad->mode == access_read_only)
4827 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4828
4829 if (range[0] && maxread && tree_fits_uhwi_p (size))
4830 {
4831 if (tree_int_cst_lt (maxobjsize, range[0]))
4832 {
4833 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4834 range, size, pad);
4835 return false;
4836 }
4837
4838 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4839 {
4840 int opt = (dstwrite || mode != access_read_only
4841 ? OPT_Wstringop_overflow_
4842 : OPT_Wstringop_overread);
4843 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4844 return false;
4845 }
4846 }
4847
4848 maybe_warn_nonstring_arg (func, exp);
4849 }
4850
4851 /* Check for reading past the end of SRC. */
4852 bool overread = (slen
4853 && slen == srcstr
4854 && dstwrite
4855 && range[0]
4856 && TREE_CODE (slen) == INTEGER_CST
4857 && tree_int_cst_lt (slen, range[0]));
4858 /* If none is determined try to get a better answer based on the details
4859 in PAD. */
4860 if (!overread
4861 && pad
4862 && pad->src.sizrng[1] >= 0
4863 && pad->src.offrng[0] >= 0
4864 && (pad->src.offrng[1] < 0
4865 || pad->src.offrng[0] <= pad->src.offrng[1]))
4866 {
4867 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4868 PAD is nonnull and BNDRNG is valid. */
4869 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4870 /* Set OVERREAD for reads starting just past the end of an object. */
4871 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4872 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4873 slen = size_zero_node;
4874 }
4875
4876 if (overread)
4877 {
4878 if (TREE_NO_WARNING (exp)
4879 || (srcstr && TREE_NO_WARNING (srcstr))
4880 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4881 return false;
4882
4883 location_t loc = tree_inlined_location (exp);
4884 const bool read
4885 = mode == access_read_only || mode == access_read_write;
4886 const bool maybe = pad && pad->dst.parmarray;
4887 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4888 slen, false, read, maybe))
4889 {
4890 TREE_NO_WARNING (exp) = true;
4891 if (pad)
4892 pad->src.inform_access (access_read_only);
4893 }
4894 return false;
4895 }
4896
4897 return true;
4898 }
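/* An illustrative sketch tying the checks above together:

     char d[4];
     strcpy (d, "overflow");

   The source requires 9 bytes including the terminating nul, which
   exceeds the destination size of 4, so the overflow is diagnosed,
   inform_access points at the declaration of D, and the function
   returns false.  */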
4899
4900 /* A convenience wrapper for check_access above to check access
4901 by a read-only function like puts. */
4902
4903 static bool
4904 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4905 int ost /* = 1 */)
4906 {
4907 if (!warn_stringop_overread)
4908 return true;
4909
4910 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4911 compute_objsize (src, ost, &data.src);
4912 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4913 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4914 &data);
4915 }
4916
4917 /* If STMT is a call to an allocation function, returns the constant
4918 maximum size of the object allocated by the call represented as
4919 sizetype. If nonnull, sets RNG1[] to the range of the size.
4920 When nonnull, uses RVALS for range information, otherwise calls
4921 get_range_info to get it.
4922 Returns null when STMT is not a call to a valid allocation function. */
4923
4924 tree
4925 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4926 range_query * /* = NULL */)
4927 {
4928 if (!stmt)
4929 return NULL_TREE;
4930
4931 tree allocfntype;
4932 if (tree fndecl = gimple_call_fndecl (stmt))
4933 allocfntype = TREE_TYPE (fndecl);
4934 else
4935 allocfntype = gimple_call_fntype (stmt);
4936
4937 if (!allocfntype)
4938 return NULL_TREE;
4939
4940 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4941 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4942 if (!at)
4943 {
4944 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4945 return NULL_TREE;
4946
4947 argidx1 = 0;
4948 }
4949
4950 unsigned nargs = gimple_call_num_args (stmt);
4951
4952 if (argidx1 == UINT_MAX)
4953 {
4954 tree atval = TREE_VALUE (at);
4955 if (!atval)
4956 return NULL_TREE;
4957
4958 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4959 if (nargs <= argidx1)
4960 return NULL_TREE;
4961
4962 atval = TREE_CHAIN (atval);
4963 if (atval)
4964 {
4965 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4966 if (nargs <= argidx2)
4967 return NULL_TREE;
4968 }
4969 }
4970
4971 tree size = gimple_call_arg (stmt, argidx1);
4972
4973 wide_int rng1_buf[2];
4974 /* If RNG1 is not set, use the buffer. */
4975 if (!rng1)
4976 rng1 = rng1_buf;
4977
4978 /* Use maximum precision to avoid overflow below. */
4979 const int prec = ADDR_MAX_PRECISION;
4980
4981 {
4982 tree r[2];
4983 /* Determine the largest valid range size, including zero. */
4984 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4985 return NULL_TREE;
4986 rng1[0] = wi::to_wide (r[0], prec);
4987 rng1[1] = wi::to_wide (r[1], prec);
4988 }
4989
4990 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4991 return fold_convert (sizetype, size);
4992
4993 /* To handle ranges do the math in wide_int and return the product
4994 of the upper bounds as a constant. Ignore anti-ranges. */
4995 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4996 wide_int rng2[2];
4997 {
4998 tree r[2];
4999 /* As above, use the full non-negative range on failure. */
5000 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5001 return NULL_TREE;
5002 rng2[0] = wi::to_wide (r[0], prec);
5003 rng2[1] = wi::to_wide (r[1], prec);
5004 }
5005
5006 /* Compute products of both bounds for the caller but return the lesser
5007 of SIZE_MAX and the product of the upper bounds as a constant. */
5008 rng1[0] = rng1[0] * rng2[0];
5009 rng1[1] = rng1[1] * rng2[1];
5010
5011 const tree size_max = TYPE_MAX_VALUE (sizetype);
5012 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5013 {
5014 rng1[1] = wi::to_wide (size_max, prec);
5015 return size_max;
5016 }
5017
5018 return wide_int_to_tree (sizetype, rng1[1]);
5019 }
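/* An illustrative sketch of the sizes computed above (assuming the
   usual alloc_size attributes on malloc and calloc):

     p = malloc (n);       // n in [4, 16]:  RNG1 = [4, 16], returns 16
     q = calloc (n, m);    // n in [2, 4], m == 8:
                           //   RNG1 = [16, 32], returns 32

   If the product of the upper bounds exceeds SIZE_MAX the function
   returns SIZE_MAX instead.  */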
5020
5021 /* For an access to an object referenced by the function parameter PTR
5022 of pointer type, set RNG[] to the range of sizes of the object
5023 obtained from the attribute access specification for the current function.
5024 Set STATIC_ARRAY if the array parameter has been declared [static].
5025 Return the function parameter on success and null otherwise. */
5026
5027 tree
5028 gimple_parm_array_size (tree ptr, wide_int rng[2],
5029 bool *static_array /* = NULL */)
5030 {
5031 /* For a function argument try to determine the byte size of the array
5032 from the current function declaration (e.g., attribute access or
5033 related). */
5034 tree var = SSA_NAME_VAR (ptr);
5035 if (TREE_CODE (var) != PARM_DECL)
5036 return NULL_TREE;
5037
5038 const unsigned prec = TYPE_PRECISION (sizetype);
5039
5040 rdwr_map rdwr_idx;
5041 attr_access *access = get_parm_access (rdwr_idx, var);
5042 if (!access)
5043 return NULL_TREE;
5044
5045 if (access->sizarg != UINT_MAX)
5046 {
5047 /* TODO: Try to extract the range from the argument based on
5048 those of subsequent assertions or based on known calls to
5049 the current function. */
5050 return NULL_TREE;
5051 }
5052
5053 if (!access->minsize)
5054 return NULL_TREE;
5055
5056 /* Only consider ordinary array bound at level 2 (or above if it's
5057 ever added). */
5058 if (warn_array_parameter < 2 && !access->static_p)
5059 return NULL_TREE;
5060
5061 if (static_array)
5062 *static_array = access->static_p;
5063
5064 rng[0] = wi::zero (prec);
5065 rng[1] = wi::uhwi (access->minsize, prec);
5066 /* Multiply the array bound encoded in the attribute by the size
5067 of what the pointer argument to which it decays points to. */
5068 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5069 tree size = TYPE_SIZE_UNIT (eltype);
5070 if (!size || TREE_CODE (size) != INTEGER_CST)
5071 return NULL_TREE;
5072
5073 rng[1] *= wi::to_wide (size, prec);
5074 return var;
5075 }
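/* An illustrative sketch of the bound derived above:

     void f (int a[static 8]);

   For the SSA name of A, RNG is set to [0, 8 * sizeof (int)] and
   the PARM_DECL of A is returned.  Without "static" the bound is
   only used at -Warray-parameter=2 or above.  */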
5076
5077 /* Wrapper around the wide_int overload of get_range that accepts
5078 offset_int instead. For middle end expressions returns the same
5079 result. For a subset of nonconstant expressions emitted by the front
5080 end determines a more precise range than would be possible otherwise. */
5081
5082 static bool
5083 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5084 {
5085 offset_int add = 0;
5086 if (TREE_CODE (x) == PLUS_EXPR)
5087 {
5088 /* Handle constant offsets in pointer addition expressions seen
5089 in the front end IL. */
5090 tree op = TREE_OPERAND (x, 1);
5091 if (TREE_CODE (op) == INTEGER_CST)
5092 {
5093 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5094 add = wi::to_offset (op);
5095 x = TREE_OPERAND (x, 0);
5096 }
5097 }
5098
5099 if (TREE_CODE (x) == NOP_EXPR)
5100 /* Also handle conversions to sizetype seen in the front end IL. */
5101 x = TREE_OPERAND (x, 0);
5102
5103 tree type = TREE_TYPE (x);
5104 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5105 return false;
5106
5107 if (TREE_CODE (x) != INTEGER_CST
5108 && TREE_CODE (x) != SSA_NAME)
5109 {
5110 if (TYPE_UNSIGNED (type)
5111 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5112 type = signed_type_for (type);
5113
5114 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5115 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5116 return x;
5117 }
5118
5119 wide_int wr[2];
5120 if (!get_range (x, stmt, wr, rvals))
5121 return false;
5122
5123 signop sgn = SIGNED;
5124 /* Only convert signed integers or unsigned sizetype to a signed
5125 offset and avoid converting large positive values in narrower
5126 types to negative offsets. */
5127 if (TYPE_UNSIGNED (type)
5128 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5129 sgn = UNSIGNED;
5130
5131 r[0] = offset_int::from (wr[0], sgn);
5132 r[1] = offset_int::from (wr[1], sgn);
5133 return true;
5134 }
5135
5136 /* Return the argument that the call STMT to a built-in function returns
5137 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5138 from the argument reflected in the value returned by the built-in if it
5139 can be determined, otherwise to 0 and HWI_M1U respectively. */
5140
5141 static tree
5142 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5143 range_query *rvals)
5144 {
5145 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5146 || gimple_call_num_args (stmt) < 1)
5147 return NULL_TREE;
5148
5149 tree fn = gimple_call_fndecl (stmt);
5150 switch (DECL_FUNCTION_CODE (fn))
5151 {
5152 case BUILT_IN_MEMCPY:
5153 case BUILT_IN_MEMCPY_CHK:
5154 case BUILT_IN_MEMMOVE:
5155 case BUILT_IN_MEMMOVE_CHK:
5156 case BUILT_IN_MEMSET:
5157 case BUILT_IN_STPCPY:
5158 case BUILT_IN_STPCPY_CHK:
5159 case BUILT_IN_STPNCPY:
5160 case BUILT_IN_STPNCPY_CHK:
5161 case BUILT_IN_STRCAT:
5162 case BUILT_IN_STRCAT_CHK:
5163 case BUILT_IN_STRCPY:
5164 case BUILT_IN_STRCPY_CHK:
5165 case BUILT_IN_STRNCAT:
5166 case BUILT_IN_STRNCAT_CHK:
5167 case BUILT_IN_STRNCPY:
5168 case BUILT_IN_STRNCPY_CHK:
5169 offrng[0] = offrng[1] = 0;
5170 return gimple_call_arg (stmt, 0);
5171
5172 case BUILT_IN_MEMPCPY:
5173 case BUILT_IN_MEMPCPY_CHK:
5174 {
5175 tree off = gimple_call_arg (stmt, 2);
5176 if (!get_offset_range (off, stmt, offrng, rvals))
5177 {
5178 offrng[0] = 0;
5179 offrng[1] = HOST_WIDE_INT_M1U;
5180 }
5181 return gimple_call_arg (stmt, 0);
5182 }
5183
5184 case BUILT_IN_MEMCHR:
5185 {
5186 tree off = gimple_call_arg (stmt, 2);
5187 if (get_offset_range (off, stmt, offrng, rvals))
5188 offrng[0] = 0;
5189 else
5190 {
5191 offrng[0] = 0;
5192 offrng[1] = HOST_WIDE_INT_M1U;
5193 }
5194 return gimple_call_arg (stmt, 0);
5195 }
5196
5197 case BUILT_IN_STRCHR:
5198 case BUILT_IN_STRRCHR:
5199 case BUILT_IN_STRSTR:
5200 {
5201 offrng[0] = 0;
5202 offrng[1] = HOST_WIDE_INT_M1U;
5203 }
5204 return gimple_call_arg (stmt, 0);
5205
5206 default:
5207 break;
5208 }
5209
5210 return NULL_TREE;
5211 }
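/* Illustrative sketch (not part of the original source): for a call
   such as

     p_1 = __builtin_memchr (s_2, c_3, n_4);

   gimple_call_return_array returns S_2 and sets OFFRNG to
   [0, upper bound of N_4], since the result points somewhere between
   S and S + N.  For memcpy and the other copy functions above the
   result is the first argument itself, so OFFRNG is [0, 0]; for strchr
   and strstr the offset is unknown, so OFFRNG is [0, HOST_WIDE_INT_M1U].  */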
5212
5213 /* A helper of compute_objsize() to determine the size from an assignment
5214 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5215
5216 static bool
5217 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5218 ssa_name_limit_t &snlim, pointer_query *qry)
5219 {
5220 tree_code code = gimple_assign_rhs_code (stmt);
5221
5222 tree ptr = gimple_assign_rhs1 (stmt);
5223
5224 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5225 Determine the size/offset of each and use the one with more or less
5226 space remaining, respectively. If either fails, use the information
5227 determined from the other instead, adjusted up or down as appropriate
5228 for the expression. */
5229 access_ref aref[2] = { *pref, *pref };
5230 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5231 {
5232 aref[0].base0 = false;
5233 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5234 aref[0].add_max_offset ();
5235 aref[0].set_max_size_range ();
5236 }
5237
5238 ptr = gimple_assign_rhs2 (stmt);
5239 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5240 {
5241 aref[1].base0 = false;
5242 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5243 aref[1].add_max_offset ();
5244 aref[1].set_max_size_range ();
5245 }
5246
5247 if (!aref[0].ref && !aref[1].ref)
5248 /* Fail if the identity of neither argument could be determined. */
5249 return false;
5250
5251 bool i0 = false;
5252 if (aref[0].ref && aref[0].base0)
5253 {
5254 if (aref[1].ref && aref[1].base0)
5255 {
5256 /* If the object referenced by both arguments has been determined
5257 set *PREF to the one with more or less space remaining, whichever
5258 is appropriate for CODE.
5259 TODO: Indicate when the objects are distinct so it can be
5260 diagnosed. */
5261 i0 = code == MAX_EXPR;
5262 const bool i1 = !i0;
5263
5264 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5265 *pref = aref[i1];
5266 else
5267 *pref = aref[i0];
5268 return true;
5269 }
5270
5271 /* If only the object referenced by one of the arguments could be
5272 determined, use it and... */
5273 *pref = aref[0];
5274 i0 = true;
5275 }
5276 else
5277 *pref = aref[1];
5278
5279 const bool i1 = !i0;
5280 /* ...see if the offset obtained from the other pointer can be used
5281 to tighten up the bound on the offset obtained from the first. */
5282 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5283 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5284 {
5285 pref->offrng[0] = aref[i0].offrng[0];
5286 pref->offrng[1] = aref[i0].offrng[1];
5287 }
5288 return true;
5289 }
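/* Worked example (illustrative only, not part of the original source):
   given

     char a[8];
     char *p = &a[2], *q = &a[5];

   both operands of MIN_EXPR <p, q> or MAX_EXPR <p, q> refer to A; P has
   6 bytes of space remaining and Q has 3, and the helper above sets
   *PREF to whichever of the two is appropriate for the expression.  */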
5290
5291 /* Helper to compute the size of the object referenced by the PTR
5292 expression which must have pointer type, using Object Size type
5293 OSTYPE (only the least significant 2 bits are used).
5294 On success, sets PREF->REF to the DECL of the referenced object
5295 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5296 offsets into it, and PREF->SIZRNG to the range of sizes of
5297 the object(s).
5298 SNLIM is used to avoid visiting the same PHI operand multiple
5299 times, and, when nonnull, RVALS to determine range information.
5300 Returns true on success, false when a meaningful size (or range)
5301 cannot be determined.
5302
5303 The function is intended for diagnostics and should not be used
5304 to influence code generation or optimization. */
5305
5306 static bool
5307 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5308 ssa_name_limit_t &snlim, pointer_query *qry)
5309 {
5310 STRIP_NOPS (ptr);
5311
5312 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5313 if (addr)
5314 {
5315 --pref->deref;
5316 ptr = TREE_OPERAND (ptr, 0);
5317 }
5318
5319 if (DECL_P (ptr))
5320 {
5321 pref->ref = ptr;
5322
5323 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5324 {
5325 /* Set the maximum size if the reference is to the pointer
5326 itself (as opposed to what it points to). */
5327 pref->set_max_size_range ();
5328 return true;
5329 }
5330
5331 if (tree size = decl_init_size (ptr, false))
5332 if (TREE_CODE (size) == INTEGER_CST)
5333 {
5334 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5335 return true;
5336 }
5337
5338 pref->set_max_size_range ();
5339 return true;
5340 }
5341
5342 const tree_code code = TREE_CODE (ptr);
5343 range_query *const rvals = qry ? qry->rvals : NULL;
5344
5345 if (code == BIT_FIELD_REF)
5346 {
5347 tree ref = TREE_OPERAND (ptr, 0);
5348 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5349 return false;
5350
5351 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5352 pref->add_offset (off / BITS_PER_UNIT);
5353 return true;
5354 }
5355
5356 if (code == COMPONENT_REF)
5357 {
5358 tree ref = TREE_OPERAND (ptr, 0);
5359 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5360 /* In accesses through union types consider the entire unions
5361 rather than just their members. */
5362 ostype = 0;
5363 tree field = TREE_OPERAND (ptr, 1);
5364
5365 if (ostype == 0)
5366 {
5367 /* In OSTYPE zero (for raw memory functions like memcpy), use
5368 the maximum size instead if the identity of the enclosing
5369 object cannot be determined. */
5370 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5371 return false;
5372
5373 /* Otherwise, use the size of the enclosing object and add
5374 the offset of the member to the offset computed so far. */
5375 tree offset = byte_position (field);
5376 if (TREE_CODE (offset) == INTEGER_CST)
5377 pref->add_offset (wi::to_offset (offset));
5378 else
5379 pref->add_max_offset ();
5380
5381 if (!pref->ref)
5382 /* REF may have been already set to an SSA_NAME earlier
5383 to provide better context for diagnostics. In that case,
5384 leave it unchanged. */
5385 pref->ref = ref;
5386 return true;
5387 }
5388
5389 pref->ref = field;
5390
5391 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5392 {
5393 /* Set maximum size if the reference is to the pointer member
5394 itself (as opposed to what it points to). */
5395 pref->set_max_size_range ();
5396 return true;
5397 }
5398
5399 /* SAM is set for array members that might need special treatment. */
5400 special_array_member sam;
5401 tree size = component_ref_size (ptr, &sam);
5402 if (sam == special_array_member::int_0)
5403 pref->sizrng[0] = pref->sizrng[1] = 0;
5404 else if (!pref->trail1special && sam == special_array_member::trail_1)
5405 pref->sizrng[0] = pref->sizrng[1] = 1;
5406 else if (size && TREE_CODE (size) == INTEGER_CST)
5407 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5408 else
5409 {
5410 /* When the size of the member is unknown it's either a flexible
5411 array member or a trailing special array member (either zero
5412 length or one-element). Set the size to the maximum minus
5413 the constant size of the type. */
5414 pref->sizrng[0] = 0;
5415 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5416 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5417 if (TREE_CODE (recsize) == INTEGER_CST)
5418 pref->sizrng[1] -= wi::to_offset (recsize);
5419 }
5420 return true;
5421 }
5422
5423 if (code == ARRAY_REF || code == MEM_REF)
5424 {
5425 ++pref->deref;
5426
5427 tree ref = TREE_OPERAND (ptr, 0);
5428 tree reftype = TREE_TYPE (ref);
5429 if (!addr && code == ARRAY_REF
5430 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5431 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
5432 of known bound. */
5433 return false;
5434
5435 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
5436 {
5437 /* Give up for MEM_REFs of vector types; those may be synthesized
5438 from multiple assignments to consecutive data members. See PR
5439 93200.
5440 FIXME: Deal with this more generally, e.g., by marking up such
5441 MEM_REFs at the time they're created. */
5442 reftype = TREE_TYPE (reftype);
5443 if (TREE_CODE (reftype) == VECTOR_TYPE)
5444 return false;
5445 }
5446
5447 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5448 return false;
5449
5450 offset_int orng[2];
5451 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5452 if (!get_offset_range (off, NULL, orng, rvals))
5453 {
5454 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5455 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5456 orng[0] = -orng[1] - 1;
5457 }
5458
5459 if (TREE_CODE (ptr) == ARRAY_REF)
5460 {
5461 /* Convert the array index range determined above to a byte
5462 offset. */
5463 tree lowbnd = array_ref_low_bound (ptr);
5464 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5465 {
5466 /* Adjust the index by the low bound of the array domain
5467 (normally zero but 1 in Fortran). */
5468 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5469 orng[0] -= lb;
5470 orng[1] -= lb;
5471 }
5472
5473 tree eltype = TREE_TYPE (ptr);
5474 tree tpsize = TYPE_SIZE_UNIT (eltype);
5475 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5476 {
5477 pref->add_max_offset ();
5478 return true;
5479 }
5480
5481 offset_int sz = wi::to_offset (tpsize);
5482 orng[0] *= sz;
5483 orng[1] *= sz;
5484
5485 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5486 {
5487 /* Except for the permissive raw memory functions which use
5488 the size of the whole object determined above, use the size
5489 of the referenced array. Because the overall offset is from
5490 the beginning of the complete array object, add this overall
5491 offset to the size of the array. */
5492 offset_int sizrng[2] =
5493 {
5494 pref->offrng[0] + orng[0] + sz,
5495 pref->offrng[1] + orng[1] + sz
5496 };
5497 if (sizrng[1] < sizrng[0])
5498 std::swap (sizrng[0], sizrng[1]);
5499 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5500 pref->sizrng[0] = sizrng[0];
5501 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5502 pref->sizrng[1] = sizrng[1];
5503 }
5504 }
5505
5506 pref->add_offset (orng[0], orng[1]);
5507 return true;
5508 }
5509
5510 if (code == TARGET_MEM_REF)
5511 {
5512 tree ref = TREE_OPERAND (ptr, 0);
5513 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5514 return false;
5515
5516 /* TODO: Handle remaining operands. Until then, add maximum offset. */
5517 pref->ref = ptr;
5518 pref->add_max_offset ();
5519 return true;
5520 }
5521
5522 if (code == INTEGER_CST)
5523 {
5524 /* Pointer constants other than null are most likely the result
5525 of erroneous null pointer addition/subtraction. Set size to
5526 zero. For null pointers, set size to the maximum for now
5527 since those may be the result of jump threading. */
5528 if (integer_zerop (ptr))
5529 pref->set_max_size_range ();
5530 else
5531 pref->sizrng[0] = pref->sizrng[1] = 0;
5532 pref->ref = ptr;
5533
5534 return true;
5535 }
5536
5537 if (code == STRING_CST)
5538 {
5539 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5540 pref->ref = ptr;
5541 return true;
5542 }
5543
5544 if (code == POINTER_PLUS_EXPR)
5545 {
5546 tree ref = TREE_OPERAND (ptr, 0);
5547 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5548 return false;
5549
5550 /* Clear DEREF since the offset is being applied to the target
5551 of the dereference. */
5552 pref->deref = 0;
5553
5554 offset_int orng[2];
5555 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5556 if (get_offset_range (off, NULL, orng, rvals))
5557 pref->add_offset (orng[0], orng[1]);
5558 else
5559 pref->add_max_offset ();
5560 return true;
5561 }
5562
5563 if (code == VIEW_CONVERT_EXPR)
5564 {
5565 ptr = TREE_OPERAND (ptr, 0);
5566 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5567 }
5568
5569 if (code == SSA_NAME)
5570 {
5571 if (!snlim.next ())
5572 return false;
5573
5574 /* Only process an SSA_NAME if the recursion limit has not yet
5575 been reached. */
5576 if (qry)
5577 {
5578 if (++qry->depth)
5579 qry->max_depth = qry->depth;
5580 if (const access_ref *cache_ref = qry->get_ref (ptr))
5581 {
5582 /* If the pointer is in the cache set *PREF to what it refers
5583 to and return success. */
5584 *pref = *cache_ref;
5585 return true;
5586 }
5587 }
5588
5589 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5590 if (is_gimple_call (stmt))
5591 {
5592 /* If STMT is a call to an allocation function get the size
5593 from its argument(s). If successful, also set *PREF->REF
5594 to PTR for the caller to include in diagnostics. */
5595 wide_int wr[2];
5596 if (gimple_call_alloc_size (stmt, wr, rvals))
5597 {
5598 pref->ref = ptr;
5599 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5600 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5601 /* Constrain both bounds to a valid size. */
5602 offset_int maxsize = wi::to_offset (max_object_size ());
5603 if (pref->sizrng[0] > maxsize)
5604 pref->sizrng[0] = maxsize;
5605 if (pref->sizrng[1] > maxsize)
5606 pref->sizrng[1] = maxsize;
5607 }
5608 else
5609 {
5610 /* For functions known to return one of their pointer arguments
5611 try to determine what the returned pointer points to, and on
5612 success add OFFRNG which was set to the offset added by
5613 the function (e.g., memchr) to the overall offset. */
5614 offset_int offrng[2];
5615 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5616 {
5617 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5618 return false;
5619
5620 /* Cap OFFRNG[1] to at most the remaining size of
5621 the object. */
5622 offset_int remrng[2];
5623 remrng[1] = pref->size_remaining (remrng);
5624 if (remrng[1] < offrng[1])
5625 offrng[1] = remrng[1];
5626 pref->add_offset (offrng[0], offrng[1]);
5627 }
5628 else
5629 {
5630 /* For other calls that might return arbitrary pointers
5631 including into the middle of objects set the size
5632 range to maximum, clear PREF->BASE0, and also set
5633 PREF->REF to include in diagnostics. */
5634 pref->set_max_size_range ();
5635 pref->base0 = false;
5636 pref->ref = ptr;
5637 }
5638 }
5639 qry->put_ref (ptr, *pref);
5640 return true;
5641 }
5642
5643 if (gimple_nop_p (stmt))
5644 {
5645 /* For a function argument try to determine the byte size
5646 of the array from the current function declaration
5647 (e.g., attribute access or related). */
5648 wide_int wr[2];
5649 bool static_array = false;
5650 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5651 {
5652 pref->parmarray = !static_array;
5653 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5654 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5655 pref->ref = ref;
5656 qry->put_ref (ptr, *pref);
5657 return true;
5658 }
5659
5660 pref->set_max_size_range ();
5661 pref->base0 = false;
5662 pref->ref = ptr;
5663 qry->put_ref (ptr, *pref);
5664 return true;
5665 }
5666
5667 if (gimple_code (stmt) == GIMPLE_PHI)
5668 {
5669 pref->ref = ptr;
5670 access_ref phi_ref = *pref;
5671 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5672 return false;
5673 *pref = phi_ref;
5674 pref->ref = ptr;
5675 qry->put_ref (ptr, *pref);
5676 return true;
5677 }
5678
5679 if (!is_gimple_assign (stmt))
5680 {
5681 /* Clear BASE0 since the assigned pointer might point into
5682 the middle of the object, set the maximum size range and,
5683 if the SSA_NAME refers to a function argument, set
5684 PREF->REF to it. */
5685 pref->base0 = false;
5686 pref->set_max_size_range ();
5687 pref->ref = ptr;
5688 return true;
5689 }
5690
5691 tree_code code = gimple_assign_rhs_code (stmt);
5692
5693 if (code == MAX_EXPR || code == MIN_EXPR)
5694 {
5695 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5696 return false;
5697 qry->put_ref (ptr, *pref);
5698 return true;
5699 }
5700
5701 tree rhs = gimple_assign_rhs1 (stmt);
5702
5703 if (code == POINTER_PLUS_EXPR
5704 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5705 {
5706 /* Compute the size of the object first. */
5707 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5708 return false;
5709
5710 offset_int orng[2];
5711 tree off = gimple_assign_rhs2 (stmt);
5712 if (get_offset_range (off, stmt, orng, rvals))
5713 pref->add_offset (orng[0], orng[1]);
5714 else
5715 pref->add_max_offset ();
5716 qry->put_ref (ptr, *pref);
5717 return true;
5718 }
5719
5720 if (code == ADDR_EXPR
5721 || code == SSA_NAME)
5722 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5723
5724 /* (This could also be an assignment from a nonlocal pointer.) Save
5725 PTR to mention in diagnostics but otherwise treat it as a pointer
5726 to an unknown object. */
5727 pref->ref = rhs;
5728 pref->base0 = false;
5729 pref->set_max_size_range ();
5730 return true;
5731 }
5732
5733 /* Assume all other expressions point into an unknown object
5734 of the maximum valid size. */
5735 pref->ref = ptr;
5736 pref->base0 = false;
5737 pref->set_max_size_range ();
5738 if (TREE_CODE (ptr) == SSA_NAME)
5739 qry->put_ref (ptr, *pref);
5740 return true;
5741 }
5742
5743 /* A "public" wrapper around the above. Clients should use this overload
5744 instead. */
5745
5746 tree
5747 compute_objsize (tree ptr, int ostype, access_ref *pref,
5748 range_query *rvals /* = NULL */)
5749 {
5750 pointer_query qry;
5751 qry.rvals = rvals;
5752 ssa_name_limit_t snlim;
5753 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5754 return NULL_TREE;
5755
5756 offset_int maxsize = pref->size_remaining ();
5757 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5758 pref->offrng[0] = 0;
5759 return wide_int_to_tree (sizetype, maxsize);
5760 }
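/* Illustrative sketch only (not part of the original source): a
   diagnostic pass might query the object a pointer refers to like so,
   assuming PTR is a pointer-valued tree:

     access_ref aref;
     if (tree size = compute_objsize (ptr, 1, &aref))
       warn_if_too_small (size, &aref);

   SIZE is the number of bytes remaining past the offset, AREF.REF
   identifies the object (when unique), and AREF.OFFRNG and AREF.SIZRNG
   hold the offset and size ranges.  WARN_IF_TOO_SMALL is a hypothetical
   caller, not a function in this file.  As noted above compute_objsize_r,
   the result is intended for diagnostics only, not to drive code
   generation or optimization.  */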
5761
5762 /* Transitional wrapper. The function should be removed once callers
5763 transition to the pointer_query API. */
5764
5765 tree
5766 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5767 {
5768 pointer_query qry;
5769 if (ptr_qry)
5770 ptr_qry->depth = 0;
5771 else
5772 ptr_qry = &qry;
5773
5774 ssa_name_limit_t snlim;
5775 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5776 return NULL_TREE;
5777
5778 offset_int maxsize = pref->size_remaining ();
5779 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5780 pref->offrng[0] = 0;
5781 return wide_int_to_tree (sizetype, maxsize);
5782 }
5783
5784 /* Legacy wrapper around the above. The function should be removed
5785 once callers transition to one of the two above. */
5786
5787 tree
5788 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5789 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5790 {
5791 /* Set the initial offsets to zero and size to negative to indicate
5792 none has been computed yet. */
5793 access_ref ref;
5794 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5795 if (!size || !ref.base0)
5796 return NULL_TREE;
5797
5798 if (pdecl)
5799 *pdecl = ref.ref;
5800
5801 if (poff)
5802 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5803
5804 return size;
5805 }
5806
5807 /* Helper to determine and check the sizes of the source and the destination
5808 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5809 call expression, DEST is the destination argument, SRC is the source
5810 argument or null, and SIZE is the number of bytes. Use Object Size type-0
5811 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5812 (no overflow or invalid sizes), false otherwise. */
5813
5814 static bool
5815 check_memop_access (tree exp, tree dest, tree src, tree size)
5816 {
5817 /* For functions like memset and memcpy that operate on raw memory
5818 try to determine the size of the largest source and destination
5819 object using type-0 Object Size regardless of the object size
5820 type specified by the option. */
5821 access_data data (exp, access_read_write);
5822 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5823 tree dstsize = compute_objsize (dest, 0, &data.dst);
5824
5825 return check_access (exp, size, /*maxread=*/NULL_TREE,
5826 srcsize, dstsize, data.mode, &data);
5827 }
5828
5829 /* Validate memchr arguments without performing any expansion.
5830 Return NULL_RTX. */
5831
5832 static rtx
5833 expand_builtin_memchr (tree exp, rtx)
5834 {
5835 if (!validate_arglist (exp,
5836 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5837 return NULL_RTX;
5838
5839 tree arg1 = CALL_EXPR_ARG (exp, 0);
5840 tree len = CALL_EXPR_ARG (exp, 2);
5841
5842 check_read_access (exp, arg1, len, 0);
5843
5844 return NULL_RTX;
5845 }
5846
5847 /* Expand a call EXP to the memcpy builtin.
5848 Return NULL_RTX if we failed; the caller should emit a normal call,
5849 otherwise try to get the result in TARGET, if convenient (and in
5850 mode MODE if that's convenient). */
5851
5852 static rtx
5853 expand_builtin_memcpy (tree exp, rtx target)
5854 {
5855 if (!validate_arglist (exp,
5856 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5857 return NULL_RTX;
5858
5859 tree dest = CALL_EXPR_ARG (exp, 0);
5860 tree src = CALL_EXPR_ARG (exp, 1);
5861 tree len = CALL_EXPR_ARG (exp, 2);
5862
5863 check_memop_access (exp, dest, src, len);
5864
5865 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5866 /*retmode=*/ RETURN_BEGIN, false);
5867 }
5868
5869 /* Check a call EXP to the memmove built-in for validity.
5870 Return NULL_RTX on both success and failure. */
5871
5872 static rtx
5873 expand_builtin_memmove (tree exp, rtx target)
5874 {
5875 if (!validate_arglist (exp,
5876 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5877 return NULL_RTX;
5878
5879 tree dest = CALL_EXPR_ARG (exp, 0);
5880 tree src = CALL_EXPR_ARG (exp, 1);
5881 tree len = CALL_EXPR_ARG (exp, 2);
5882
5883 check_memop_access (exp, dest, src, len);
5884
5885 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5886 /*retmode=*/ RETURN_BEGIN, true);
5887 }
5888
5889 /* Expand a call EXP to the mempcpy builtin.
5890 Return NULL_RTX if we failed; the caller should emit a normal call,
5891 otherwise try to get the result in TARGET, if convenient (and in
5892 mode MODE if that's convenient). */
5893
5894 static rtx
5895 expand_builtin_mempcpy (tree exp, rtx target)
5896 {
5897 if (!validate_arglist (exp,
5898 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5899 return NULL_RTX;
5900
5901 tree dest = CALL_EXPR_ARG (exp, 0);
5902 tree src = CALL_EXPR_ARG (exp, 1);
5903 tree len = CALL_EXPR_ARG (exp, 2);
5904
5905 /* Policy does not generally allow using compute_objsize (which
5906 is used internally by check_memop_access) to change code generation
5907 or drive optimization decisions.
5908
5909 In this instance it is safe because the code we generate has
5910 the same semantics regardless of the return value of
5911 check_memop_access. Exactly the same amount of data is copied
5912 and the return value is exactly the same in both cases.
5913
5914 Furthermore, check_memop_access always uses mode 0 for the call to
5915 compute_objsize, so the imprecise nature of compute_objsize is
5916 avoided. */
5917
5918 /* Avoid expanding mempcpy into memcpy when the call is determined
5919 to overflow the buffer. This also prevents the same overflow
5920 from being diagnosed again when expanding memcpy. */
5921 if (!check_memop_access (exp, dest, src, len))
5922 return NULL_RTX;
5923
5924 return expand_builtin_mempcpy_args (dest, src, len,
5925 target, exp, /*retmode=*/ RETURN_END);
5926 }
5927
5928 /* Helper function to do the actual work of expanding the memory copy
5929 family of functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
5930 of memory from SRC to DEST and assign to TARGET if convenient. Return
5931 value is based on RETMODE argument. */
5932
5933 static rtx
5934 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5935 rtx target, tree exp, memop_ret retmode,
5936 bool might_overlap)
5937 {
5938 unsigned int src_align = get_pointer_alignment (src);
5939 unsigned int dest_align = get_pointer_alignment (dest);
5940 rtx dest_mem, src_mem, dest_addr, len_rtx;
5941 HOST_WIDE_INT expected_size = -1;
5942 unsigned int expected_align = 0;
5943 unsigned HOST_WIDE_INT min_size;
5944 unsigned HOST_WIDE_INT max_size;
5945 unsigned HOST_WIDE_INT probable_max_size;
5946
5947 bool is_move_done;
5948
5949 /* If DEST is not a pointer type, call the normal function. */
5950 if (dest_align == 0)
5951 return NULL_RTX;
5952
5953 /* If SRC is not a pointer type, don't do this
5954 operation in-line. */
5955 if (src_align == 0)
5956 return NULL_RTX;
5957
5958 if (currently_expanding_gimple_stmt)
5959 stringop_block_profile (currently_expanding_gimple_stmt,
5960 &expected_align, &expected_size);
5961
5962 if (expected_align < dest_align)
5963 expected_align = dest_align;
5964 dest_mem = get_memory_rtx (dest, len);
5965 set_mem_align (dest_mem, dest_align);
5966 len_rtx = expand_normal (len);
5967 determine_block_size (len, len_rtx, &min_size, &max_size,
5968 &probable_max_size);
5969
5970 /* Try to get the byte representation of the constant SRC points to,
5971 with its byte size in NBYTES. */
5972 unsigned HOST_WIDE_INT nbytes;
5973 const char *rep = getbyterep (src, &nbytes);
5974
5975 /* If the function's constant bound LEN_RTX is less than or equal
5976 to the byte size of the representation of the constant argument,
5977 and if block move would be done by pieces, we can avoid loading
5978 the bytes from memory and only store the computed constant.
5979 This works in the overlap (memmove) case as well because
5980 store_by_pieces just generates a series of stores of constants
5981 from the representation returned by getbyterep(). */
5982 if (rep
5983 && CONST_INT_P (len_rtx)
5984 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
5985 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
5986 CONST_CAST (char *, rep),
5987 dest_align, false))
5988 {
5989 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
5990 builtin_memcpy_read_str,
5991 CONST_CAST (char *, rep),
5992 dest_align, false, retmode);
5993 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5994 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5995 return dest_mem;
5996 }
5997
5998 src_mem = get_memory_rtx (src, len);
5999 set_mem_align (src_mem, src_align);
6000
6001 /* Copy word part most expediently. */
6002 enum block_op_methods method = BLOCK_OP_NORMAL;
6003 if (CALL_EXPR_TAILCALL (exp)
6004 && (retmode == RETURN_BEGIN || target == const0_rtx))
6005 method = BLOCK_OP_TAILCALL;
6006 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6007 && retmode == RETURN_END
6008 && !might_overlap
6009 && target != const0_rtx);
6010 if (use_mempcpy_call)
6011 method = BLOCK_OP_NO_LIBCALL_RET;
6012 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6013 expected_align, expected_size,
6014 min_size, max_size, probable_max_size,
6015 use_mempcpy_call, &is_move_done,
6016 might_overlap);
6017
6018 /* Bail out when a mempcpy call would be expanded as libcall and when
6019 we have a target that provides a fast implementation
6020 of mempcpy routine. */
6021 if (!is_move_done)
6022 return NULL_RTX;
6023
6024 if (dest_addr == pc_rtx)
6025 return NULL_RTX;
6026
6027 if (dest_addr == 0)
6028 {
6029 dest_addr = force_operand (XEXP (dest_mem, 0), target);
6030 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6031 }
6032
6033 if (retmode != RETURN_BEGIN && target != const0_rtx)
6034 {
6035 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6036 /* stpcpy returns a pointer to the last byte copied (the terminating nul), so subtract one. */
6037 if (retmode == RETURN_END_MINUS_ONE)
6038 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6039 }
6040
6041 return dest_addr;
6042 }
6043
6044 static rtx
6045 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
6046 rtx target, tree orig_exp, memop_ret retmode)
6047 {
6048 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
6049 retmode, false);
6050 }
6051
6052 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
6053 we failed, the caller should emit a normal call, otherwise try to
6054 get the result in TARGET, if convenient.
6055 Return value is based on RETMODE argument. */
6056
6057 static rtx
6058 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
6059 {
6060 class expand_operand ops[3];
6061 rtx dest_mem;
6062 rtx src_mem;
6063
6064 if (!targetm.have_movstr ())
6065 return NULL_RTX;
6066
6067 dest_mem = get_memory_rtx (dest, NULL);
6068 src_mem = get_memory_rtx (src, NULL);
6069 if (retmode == RETURN_BEGIN)
6070 {
6071 target = force_reg (Pmode, XEXP (dest_mem, 0));
6072 dest_mem = replace_equiv_address (dest_mem, target);
6073 }
6074
6075 create_output_operand (&ops[0],
6076 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
6077 create_fixed_operand (&ops[1], dest_mem);
6078 create_fixed_operand (&ops[2], src_mem);
6079 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
6080 return NULL_RTX;
6081
6082 if (retmode != RETURN_BEGIN && target != const0_rtx)
6083 {
6084 target = ops[0].value;
6085 /* movstr is supposed to set end to the address of the NUL
6086 terminator. If the caller requested a mempcpy-like return value,
6087 adjust it. */
6088 if (retmode == RETURN_END)
6089 {
6090 rtx tem = plus_constant (GET_MODE (target),
6091 gen_lowpart (GET_MODE (target), target), 1);
6092 emit_move_insn (target, force_operand (tem, NULL_RTX));
6093 }
6094 }
6095 return target;
6096 }
6097
6098 /* Do some very basic size validation of a call to the strcat builtin
6099 given by EXP. Return NULL_RTX to have the built-in expand to a call
6100 to the library function. */
6101
6102 static rtx
6103 expand_builtin_strcat (tree exp)
6104 {
6105 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6106 || !warn_stringop_overflow)
6107 return NULL_RTX;
6108
6109 tree dest = CALL_EXPR_ARG (exp, 0);
6110 tree src = CALL_EXPR_ARG (exp, 1);
6111
6112 /* There is no way here to determine the length of the string in
6113 the destination to which the SRC string is being appended so
6114 just diagnose cases when the source string is longer than
6115 the destination object. */
6116 access_data data (exp, access_read_write, NULL_TREE, true,
6117 NULL_TREE, true);
6118 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6119 compute_objsize (src, ost, &data.src);
6120 tree destsize = compute_objsize (dest, ost, &data.dst);
6121
6122 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6123 src, destsize, data.mode, &data);
6124
6125 return NULL_RTX;
6126 }
6127
6128 /* Expand expression EXP, which is a call to the strcpy builtin. Return
6129 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6130 try to get the result in TARGET, if convenient (and in mode MODE if that's
6131 convenient). */
6132
6133 static rtx
6134 expand_builtin_strcpy (tree exp, rtx target)
6135 {
6136 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6137 return NULL_RTX;
6138
6139 tree dest = CALL_EXPR_ARG (exp, 0);
6140 tree src = CALL_EXPR_ARG (exp, 1);
6141
6142 if (warn_stringop_overflow)
6143 {
6144 access_data data (exp, access_read_write, NULL_TREE, true,
6145 NULL_TREE, true);
6146 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6147 compute_objsize (src, ost, &data.src);
6148 tree dstsize = compute_objsize (dest, ost, &data.dst);
6149 check_access (exp, /*dstwrite=*/ NULL_TREE,
6150 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6151 dstsize, data.mode, &data);
6152 }
6153
6154 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
6155 {
6156 /* Check to see if the argument was declared attribute nonstring
6157 and if so, issue a warning since at this point it's not known
6158 to be nul-terminated. */
6159 tree fndecl = get_callee_fndecl (exp);
6160 maybe_warn_nonstring_arg (fndecl, exp);
6161 return ret;
6162 }
6163
6164 return NULL_RTX;
6165 }
6166
6167 /* Helper function to do the actual work for expand_builtin_strcpy. The
6168 arguments to the builtin_strcpy call DEST and SRC are broken out
6169 so that this can also be called without constructing an actual CALL_EXPR.
6170 The other arguments and return value are the same as for
6171 expand_builtin_strcpy. */
6172
6173 static rtx
6174 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
6175 {
6176 /* Detect strcpy calls with unterminated arrays. */
6177 tree size;
6178 bool exact;
6179 if (tree nonstr = unterminated_array (src, &size, &exact))
6180 {
6181 /* NONSTR refers to the non-nul terminated constant array. */
6182 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6183 size, exact);
6184 return NULL_RTX;
6185 }
6186
6187 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
6188 }
6189
6190 /* Expand a call EXP to the stpcpy builtin.
6191 Return NULL_RTX if we failed; the caller should emit a normal call,
6192 otherwise try to get the result in TARGET, if convenient (and in
6193 mode MODE if that's convenient). */
6194
6195 static rtx
6196 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
6197 {
6198 tree dst, src;
6199 location_t loc = EXPR_LOCATION (exp);
6200
6201 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6202 return NULL_RTX;
6203
6204 dst = CALL_EXPR_ARG (exp, 0);
6205 src = CALL_EXPR_ARG (exp, 1);
6206
6207 if (warn_stringop_overflow)
6208 {
6209 access_data data (exp, access_read_write);
6210 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6211 &data.dst);
6212 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6213 src, destsize, data.mode, &data);
6214 }
6215
6216 /* If return value is ignored, transform stpcpy into strcpy. */
6217 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
6218 {
6219 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
6220 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
6221 return expand_expr (result, target, mode, EXPAND_NORMAL);
6222 }
6223 else
6224 {
6225 tree len, lenp1;
6226 rtx ret;
6227
6228 /* Ensure we get an actual string whose length can be evaluated at
6229 compile-time, not an expression containing a string. This is
6230 because the latter will potentially produce pessimized code
6231 when used to produce the return value. */
6232 c_strlen_data lendata = { };
6233 if (!c_getstr (src)
6234 || !(len = c_strlen (src, 0, &lendata, 1)))
6235 return expand_movstr (dst, src, target,
6236 /*retmode=*/ RETURN_END_MINUS_ONE);
6237
6238 if (lendata.decl)
6239 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
6240
6241 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
6242 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
6243 target, exp,
6244 /*retmode=*/ RETURN_END_MINUS_ONE);
6245
6246 if (ret)
6247 return ret;
6248
6249 if (TREE_CODE (len) == INTEGER_CST)
6250 {
6251 rtx len_rtx = expand_normal (len);
6252
6253 if (CONST_INT_P (len_rtx))
6254 {
6255 ret = expand_builtin_strcpy_args (exp, dst, src, target);
6256
6257 if (ret)
6258 {
6259 if (! target)
6260 {
6261 if (mode != VOIDmode)
6262 target = gen_reg_rtx (mode);
6263 else
6264 target = gen_reg_rtx (GET_MODE (ret));
6265 }
6266 if (GET_MODE (target) != GET_MODE (ret))
6267 ret = gen_lowpart (GET_MODE (target), ret);
6268
6269 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
6270 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
6271 gcc_assert (ret);
6272
6273 return target;
6274 }
6275 }
6276 }
6277
6278 return expand_movstr (dst, src, target,
6279 /*retmode=*/ RETURN_END_MINUS_ONE);
6280 }
6281 }
6282
6283 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6284 arguments while being careful to avoid duplicate warnings (which could
6285 be issued if the expander were to expand the call, resulting in it
6286 being emitted in expand_call()). */
6287
6288 static rtx
6289 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6290 {
6291 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6292 {
6293 /* The call has been successfully expanded. Check for nonstring
6294 arguments and issue warnings as appropriate. */
6295 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6296 return ret;
6297 }
6298
6299 return NULL_RTX;
6300 }
6301
6302 /* Check a call EXP to the stpncpy built-in for validity.
6303 Return NULL_RTX on both success and failure. */
6304
6305 static rtx
6306 expand_builtin_stpncpy (tree exp, rtx)
6307 {
6308 if (!validate_arglist (exp,
6309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6310 || !warn_stringop_overflow)
6311 return NULL_RTX;
6312
6313 /* The source and destination of the call. */
6314 tree dest = CALL_EXPR_ARG (exp, 0);
6315 tree src = CALL_EXPR_ARG (exp, 1);
6316
6317 /* The exact number of bytes to write (not the maximum). */
6318 tree len = CALL_EXPR_ARG (exp, 2);
6319 access_data data (exp, access_read_write);
6320 /* The size of the destination object. */
6321 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6322 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
6323 return NULL_RTX;
6324 }
6325
6326 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6327 bytes from constant string DATA + OFFSET and return it as target
6328 constant. */
6329
6330 rtx
6331 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
6332 scalar_int_mode mode)
6333 {
6334 const char *str = (const char *) data;
6335
6336 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6337 return const0_rtx;
6338
6339 return c_readstr (str + offset, mode);
6340 }
6341
6342 /* Helper to check the sizes of sequences and the destination of calls
6343 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6344 success (no overflow or invalid sizes), false otherwise. */
6345
6346 static bool
6347 check_strncat_sizes (tree exp, tree objsize)
6348 {
6349 tree dest = CALL_EXPR_ARG (exp, 0);
6350 tree src = CALL_EXPR_ARG (exp, 1);
6351 tree maxread = CALL_EXPR_ARG (exp, 2);
6352
6353 /* Try to determine the range of lengths that the source expression
6354 refers to. */
6355 c_strlen_data lendata = { };
6356 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6357
6358 /* Try to verify that the destination is big enough for the shortest
6359 string. */
6360
6361 access_data data (exp, access_read_write, maxread, true);
6362 if (!objsize && warn_stringop_overflow)
6363 {
6364 /* If it hasn't been provided by __strncat_chk, try to determine
6365 the size of the destination object into which the source is
6366 being copied. */
6367 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6368 }
6369
6370 /* Add one for the terminating nul. */
6371 tree srclen = (lendata.minlen
6372 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
6373 size_one_node)
6374 : NULL_TREE);
6375
6376 /* The strncat function copies at most MAXREAD bytes and always appends
6377 the terminating nul so the specified upper bound should never be equal
6378 to (or greater than) the size of the destination. */
6379 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6380 && tree_int_cst_equal (objsize, maxread))
6381 {
6382 location_t loc = tree_inlined_location (exp);
6383 warning_at (loc, OPT_Wstringop_overflow_,
6384 "%K%qD specified bound %E equals destination size",
6385 exp, get_callee_fndecl (exp), maxread);
6386
6387 return false;
6388 }
6389
6390 if (!srclen
6391 || (maxread && tree_fits_uhwi_p (maxread)
6392 && tree_fits_uhwi_p (srclen)
6393 && tree_int_cst_lt (maxread, srclen)))
6394 srclen = maxread;
6395
6396 /* The number of bytes to write is LEN but check_access will also
6397 check SRCLEN if LEN's value isn't known. */
6398 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6399 objsize, data.mode, &data);
6400 }
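/* Illustrative example (not part of the original source): with

     char d[8];
     strncat (d, s, sizeof d);

   the bound equals the destination size, so the check above emits
   "specified bound 8 equals destination size"; because strncat always
   appends a terminating nul, such a call can write one byte past the
   end of D.  */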
6401
6402 /* Similar to expand_builtin_strcat, do some very basic size validation
6403 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
6404 the built-in expand to a call to the library function. */
6405
6406 static rtx
6407 expand_builtin_strncat (tree exp, rtx)
6408 {
6409 if (!validate_arglist (exp,
6410 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6411 || !warn_stringop_overflow)
6412 return NULL_RTX;
6413
6414 tree dest = CALL_EXPR_ARG (exp, 0);
6415 tree src = CALL_EXPR_ARG (exp, 1);
6416 /* The upper bound on the number of bytes to write. */
6417 tree maxread = CALL_EXPR_ARG (exp, 2);
6418
6419 /* Detect unterminated source (only). */
6420 if (!check_nul_terminated_array (exp, src, maxread))
6421 return NULL_RTX;
6422
6423 /* The length of the source sequence. */
6424 tree slen = c_strlen (src, 1);
6425
6426 /* Try to determine the range of lengths that the source expression
6427 refers to. Since the lengths are only used for warning and not
6428 for code generation, disable strict mode below. */
6429 tree maxlen = slen;
6430 if (!maxlen)
6431 {
6432 c_strlen_data lendata = { };
6433 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6434 maxlen = lendata.maxbound;
6435 }
6436
6437 access_data data (exp, access_read_write);
6438 /* Try to verify that the destination is big enough for the shortest
6439 string. First try to determine the size of the destination object
6440 into which the source is being copied. */
6441 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6442
6443 /* Add one for the terminating nul. */
6444 tree srclen = (maxlen
6445 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
6446 size_one_node)
6447 : NULL_TREE);
6448
6449 /* The strncat function copies at most MAXREAD bytes and always appends
6450 the terminating nul so the specified upper bound should never be equal
6451 to (or greater than) the size of the destination. */
6452 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6453 && tree_int_cst_equal (destsize, maxread))
6454 {
6455 location_t loc = tree_inlined_location (exp);
6456 warning_at (loc, OPT_Wstringop_overflow_,
6457 "%K%qD specified bound %E equals destination size",
6458 exp, get_callee_fndecl (exp), maxread);
6459
6460 return NULL_RTX;
6461 }
6462
6463 if (!srclen
6464 || (maxread && tree_fits_uhwi_p (maxread)
6465 && tree_fits_uhwi_p (srclen)
6466 && tree_int_cst_lt (maxread, srclen)))
6467 srclen = maxread;
6468
6469 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6470 destsize, data.mode, &data);
6471 return NULL_RTX;
6472 }
6473
6474 /* Expand expression EXP, which is a call to the strncpy builtin. Return
6475 NULL_RTX if we failed; the caller should emit a normal call. */
6476
6477 static rtx
6478 expand_builtin_strncpy (tree exp, rtx target)
6479 {
6480 location_t loc = EXPR_LOCATION (exp);
6481
6482 if (!validate_arglist (exp,
6483 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6484 return NULL_RTX;
6485 tree dest = CALL_EXPR_ARG (exp, 0);
6486 tree src = CALL_EXPR_ARG (exp, 1);
6487 /* The number of bytes to write (not the maximum). */
6488 tree len = CALL_EXPR_ARG (exp, 2);
6489
6490 /* The length of the source sequence. */
6491 tree slen = c_strlen (src, 1);
6492
6493 if (warn_stringop_overflow)
6494 {
6495 access_data data (exp, access_read_write, len, true, len, true);
6496 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6497 compute_objsize (src, ost, &data.src);
6498 tree dstsize = compute_objsize (dest, ost, &data.dst);
6499 /* The number of bytes to write is LEN but check_access will also
6500 check SLEN if LEN's value isn't known. */
6501 check_access (exp, /*dstwrite=*/len,
6502 /*maxread=*/len, src, dstsize, data.mode, &data);
6503 }
6504
6505 /* We must be passed a constant len and src parameter. */
6506 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6507 return NULL_RTX;
6508
6509 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6510
6511 /* We're required to pad with trailing zeros if the requested
6512 len is greater than strlen(s2)+1. In that case try to
6513 use store_by_pieces; if it fails, punt. */
6514 if (tree_int_cst_lt (slen, len))
6515 {
6516 unsigned int dest_align = get_pointer_alignment (dest);
6517 const char *p = c_getstr (src);
6518 rtx dest_mem;
6519
6520 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6521 || !can_store_by_pieces (tree_to_uhwi (len),
6522 builtin_strncpy_read_str,
6523 CONST_CAST (char *, p),
6524 dest_align, false))
6525 return NULL_RTX;
6526
6527 dest_mem = get_memory_rtx (dest, len);
6528 store_by_pieces (dest_mem, tree_to_uhwi (len),
6529 builtin_strncpy_read_str,
6530 CONST_CAST (char *, p), dest_align, false,
6531 RETURN_BEGIN);
6532 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6533 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6534 return dest_mem;
6535 }
6536
6537 return NULL_RTX;
6538 }
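/* Worked example (illustrative only, not part of the original source):
   for

     char d[8];
     strncpy (d, "ab", 8);

   SLEN is 3 (the source length including the nul), which is less than
   LEN, so when the target allows storing by pieces the expansion above
   emits the two characters followed by six zero bytes directly, with no
   library call.  */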
6539
6540 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6541 bytes from constant string DATA + OFFSET and return it as target
6542 constant. */
6543
6544 rtx
6545 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6546 scalar_int_mode mode)
6547 {
6548 const char *c = (const char *) data;
6549 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
6550
6551 memset (p, *c, GET_MODE_SIZE (mode));
6552
6553 return c_readstr (p, mode);
6554 }
6555
6556 /* Callback routine for store_by_pieces. Return the RTL of a register
6557 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6558 char value given in the RTL register data. For example, if mode is
6559 4 bytes wide, return the RTL for 0x01010101*data. */
6560
6561 static rtx
6562 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6563 scalar_int_mode mode)
6564 {
6565 rtx target, coeff;
6566 size_t size;
6567 char *p;
6568
6569 size = GET_MODE_SIZE (mode);
6570 if (size == 1)
6571 return (rtx) data;
6572
6573 p = XALLOCAVEC (char, size);
6574 memset (p, 1, size);
6575 coeff = c_readstr (p, mode);
6576
6577 target = convert_to_mode (mode, (rtx) data, 1);
6578 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6579 return force_reg (mode, target);
6580 }
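/* Worked example (illustrative only, not part of the original source):
   for a 4-byte MODE and a runtime fill value held in a register,
   builtin_memset_gen_str multiplies the value by 0x01010101, so a fill
   byte of 0xab yields 0xabababab, i.e. the byte replicated across the
   word that store_by_pieces then stores.  */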
6581
6582 /* Expand expression EXP, which is a call to the memset builtin. Return
6583 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6584 try to get the result in TARGET, if convenient (and in mode MODE if that's
6585 convenient). */
6586
6587 static rtx
6588 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
6589 {
6590 if (!validate_arglist (exp,
6591 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6592 return NULL_RTX;
6593
6594 tree dest = CALL_EXPR_ARG (exp, 0);
6595 tree val = CALL_EXPR_ARG (exp, 1);
6596 tree len = CALL_EXPR_ARG (exp, 2);
6597
6598 check_memop_access (exp, dest, NULL_TREE, len);
6599
6600 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
6601 }
6602
6603 /* Helper function to do the actual work for expand_builtin_memset. The
6604 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6605 so that this can also be called without constructing an actual CALL_EXPR.
6606 The other arguments and return value are the same as for
6607 expand_builtin_memset. */
6608
6609 static rtx
6610 expand_builtin_memset_args (tree dest, tree val, tree len,
6611 rtx target, machine_mode mode, tree orig_exp)
6612 {
6613 tree fndecl, fn;
6614 enum built_in_function fcode;
6615 machine_mode val_mode;
6616 char c;
6617 unsigned int dest_align;
6618 rtx dest_mem, dest_addr, len_rtx;
6619 HOST_WIDE_INT expected_size = -1;
6620 unsigned int expected_align = 0;
6621 unsigned HOST_WIDE_INT min_size;
6622 unsigned HOST_WIDE_INT max_size;
6623 unsigned HOST_WIDE_INT probable_max_size;
6624
6625 dest_align = get_pointer_alignment (dest);
6626
6627 /* If DEST is not a pointer type, don't do this operation in-line. */
6628 if (dest_align == 0)
6629 return NULL_RTX;
6630
6631 if (currently_expanding_gimple_stmt)
6632 stringop_block_profile (currently_expanding_gimple_stmt,
6633 &expected_align, &expected_size);
6634
6635 if (expected_align < dest_align)
6636 expected_align = dest_align;
6637
6638 /* If the LEN parameter is zero, return DEST. */
6639 if (integer_zerop (len))
6640 {
6641 /* Evaluate and ignore VAL in case it has side-effects. */
6642 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6643 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6644 }
6645
6646 /* Stabilize the arguments in case we fail. */
6647 dest = builtin_save_expr (dest);
6648 val = builtin_save_expr (val);
6649 len = builtin_save_expr (len);
6650
6651 len_rtx = expand_normal (len);
6652 determine_block_size (len, len_rtx, &min_size, &max_size,
6653 &probable_max_size);
6654 dest_mem = get_memory_rtx (dest, len);
6655 val_mode = TYPE_MODE (unsigned_char_type_node);
6656
6657 if (TREE_CODE (val) != INTEGER_CST)
6658 {
6659 rtx val_rtx;
6660
6661 val_rtx = expand_normal (val);
6662 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
6663
6664 /* Assume that we can memset by pieces if we can store
6665 the coefficients by pieces (in the required modes).
6666 We can't pass builtin_memset_gen_str as that emits RTL. */
6667 c = 1;
6668 if (tree_fits_uhwi_p (len)
6669 && can_store_by_pieces (tree_to_uhwi (len),
6670 builtin_memset_read_str, &c, dest_align,
6671 true))
6672 {
6673 val_rtx = force_reg (val_mode, val_rtx);
6674 store_by_pieces (dest_mem, tree_to_uhwi (len),
6675 builtin_memset_gen_str, val_rtx, dest_align,
6676 true, RETURN_BEGIN);
6677 }
6678 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
6679 dest_align, expected_align,
6680 expected_size, min_size, max_size,
6681 probable_max_size))
6682 goto do_libcall;
6683
6684 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6686 return dest_mem;
6687 }
6688
6689 if (target_char_cast (val, &c))
6690 goto do_libcall;
6691
6692 if (c)
6693 {
6694 if (tree_fits_uhwi_p (len)
6695 && can_store_by_pieces (tree_to_uhwi (len),
6696 builtin_memset_read_str, &c, dest_align,
6697 true))
6698 store_by_pieces (dest_mem, tree_to_uhwi (len),
6699 builtin_memset_read_str, &c, dest_align, true,
6700 RETURN_BEGIN);
6701 else if (!set_storage_via_setmem (dest_mem, len_rtx,
6702 gen_int_mode (c, val_mode),
6703 dest_align, expected_align,
6704 expected_size, min_size, max_size,
6705 probable_max_size))
6706 goto do_libcall;
6707
6708 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6709 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6710 return dest_mem;
6711 }
6712
6713 set_mem_align (dest_mem, dest_align);
6714 dest_addr = clear_storage_hints (dest_mem, len_rtx,
6715 CALL_EXPR_TAILCALL (orig_exp)
6716 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
6717 expected_align, expected_size,
6718 min_size, max_size,
6719 probable_max_size);
6720
6721 if (dest_addr == 0)
6722 {
6723 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6724 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6725 }
6726
6727 return dest_addr;
6728
6729 do_libcall:
6730 fndecl = get_callee_fndecl (orig_exp);
6731 fcode = DECL_FUNCTION_CODE (fndecl);
6732 if (fcode == BUILT_IN_MEMSET)
6733 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6734 dest, val, len);
6735 else if (fcode == BUILT_IN_BZERO)
6736 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6737 dest, len);
6738 else
6739 gcc_unreachable ();
6740 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6741 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6742 return expand_call (fn, target, target == const0_rtx);
6743 }
6744
6745 /* Expand expression EXP, which is a call to the bzero builtin. Return
6746 NULL_RTX if we failed; the caller should emit a normal call. */
6747
6748 static rtx
6749 expand_builtin_bzero (tree exp)
6750 {
6751 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6752 return NULL_RTX;
6753
6754 tree dest = CALL_EXPR_ARG (exp, 0);
6755 tree size = CALL_EXPR_ARG (exp, 1);
6756
6757 check_memop_access (exp, dest, NULL_TREE, size);
6758
6759 /* New argument list transforming bzero(ptr x, int y) to
6760 memset(ptr x, int 0, size_t y). This is done this way
6761 so that if it isn't expanded inline, we fall back to
6762 calling bzero instead of memset. */
6763
6764 location_t loc = EXPR_LOCATION (exp);
6765
6766 return expand_builtin_memset_args (dest, integer_zero_node,
6767 fold_convert_loc (loc,
6768 size_type_node, size),
6769 const0_rtx, VOIDmode, exp);
6770 }
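/* Illustrative note (not part of the original source): a call such as

     __builtin_bzero (p, n);

   is expanded by the function above as if it were

     __builtin_memset (p, 0, (size_t) n);

   while passing the original bzero CALL_EXPR as ORIG_EXP so that, if
   inline expansion fails, the library call falls back to bzero rather
   than memset.  */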
6771
6772 /* Try to expand cmpstr operation ICODE with the given operands.
6773 Return the result rtx on success, otherwise return null. */
6774
6775 static rtx
6776 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6777 HOST_WIDE_INT align)
6778 {
6779 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6780
6781 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6782 target = NULL_RTX;
6783
6784 class expand_operand ops[4];
6785 create_output_operand (&ops[0], target, insn_mode);
6786 create_fixed_operand (&ops[1], arg1_rtx);
6787 create_fixed_operand (&ops[2], arg2_rtx);
6788 create_integer_operand (&ops[3], align);
6789 if (maybe_expand_insn (icode, 4, ops))
6790 return ops[0].value;
6791 return NULL_RTX;
6792 }
6793
6794 /* Expand expression EXP, which is a call to the memcmp built-in function.
6795 Return NULL_RTX if we failed and the caller should emit a normal call,
6796 otherwise try to get the result in TARGET, if convenient.
6797 RESULT_EQ is true if we can relax the returned value to be either zero
6798 or nonzero, without caring about the sign. */
6799
6800 static rtx
6801 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
6802 {
6803 if (!validate_arglist (exp,
6804 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6805 return NULL_RTX;
6806
6807 tree arg1 = CALL_EXPR_ARG (exp, 0);
6808 tree arg2 = CALL_EXPR_ARG (exp, 1);
6809 tree len = CALL_EXPR_ARG (exp, 2);
6810
6811 /* Diagnose calls where the specified length exceeds the size of either
6812 object. */
6813 if (!check_read_access (exp, arg1, len, 0)
6814 || !check_read_access (exp, arg2, len, 0))
6815 return NULL_RTX;
6816
6817 /* Due to the performance benefit, always inline the calls first
6818 when result_eq is false. */
6819 rtx result = NULL_RTX;
6820 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
6821 if (!result_eq && fcode != BUILT_IN_BCMP)
6822 {
6823 result = inline_expand_builtin_bytecmp (exp, target);
6824 if (result)
6825 return result;
6826 }
6827
6828 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6829 location_t loc = EXPR_LOCATION (exp);
6830
6831 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6832 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6833
6834 /* If we don't have POINTER_TYPE, call the function. */
6835 if (arg1_align == 0 || arg2_align == 0)
6836 return NULL_RTX;
6837
6838 rtx arg1_rtx = get_memory_rtx (arg1, len);
6839 rtx arg2_rtx = get_memory_rtx (arg2, len);
6840 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
6841
6842 /* Set MEM_SIZE as appropriate. */
6843 if (CONST_INT_P (len_rtx))
6844 {
6845 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6846 set_mem_size (arg2_rtx, INTVAL (len_rtx));
6847 }
6848
6849 by_pieces_constfn constfn = NULL;
6850
6851 /* Try to get the byte representation of the constant that ARG2 (or, only
6852 when the function's result is used for equality to zero, ARG1)
6853 points to, with its byte size in NBYTES. */
6854 unsigned HOST_WIDE_INT nbytes;
6855 const char *rep = getbyterep (arg2, &nbytes);
6856 if (result_eq && rep == NULL)
6857 {
6858 /* For equality to zero the arguments are interchangeable. */
6859 rep = getbyterep (arg1, &nbytes);
6860 if (rep != NULL)
6861 std::swap (arg1_rtx, arg2_rtx);
6862 }
6863
6864 /* If the function's constant bound LEN_RTX is less than or equal
6865 to the byte size of the representation of the constant argument,
6866 and if block move would be done by pieces, we can avoid loading
6867 the bytes from memory and only store the computed constant result. */
6868 if (rep
6869 && CONST_INT_P (len_rtx)
6870 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
6871 constfn = builtin_memcpy_read_str;
6872
6873 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6874 TREE_TYPE (len), target,
6875 result_eq, constfn,
6876 CONST_CAST (char *, rep));
6877
6878 if (result)
6879 {
6880 /* Return the value in the proper mode for this function. */
6881 if (GET_MODE (result) == mode)
6882 return result;
6883
6884 if (target != 0)
6885 {
6886 convert_move (target, result, 0);
6887 return target;
6888 }
6889
6890 return convert_to_mode (mode, result, 0);
6891 }
6892
6893 return NULL_RTX;
6894 }
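
/* Illustrative sketch (not part of this file): the constfn path above lets
   a memcmp whose second argument points to a known string constant, and
   whose bound is a constant no larger than that constant's representation,
   be expanded without loading the constant bytes from memory:

     #include <string.h>

     int
     is_png_magic (const unsigned char *p)
     {
       // The representation of "\x89PNG" is 5 bytes (including the nul),
       // so the constant bound 4 qualifies and builtin_memcpy_read_str can
       // synthesize the bytes for the piecewise comparison.
       return memcmp (p, "\x89PNG", 4) == 0;
     }
   */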
6895
6896 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
6897 if we failed; the caller should emit a normal call. Otherwise try to get
6898 the result in TARGET, if convenient. */
6899
6900 static rtx
6901 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
6902 {
6903 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6904 return NULL_RTX;
6905
6906 tree arg1 = CALL_EXPR_ARG (exp, 0);
6907 tree arg2 = CALL_EXPR_ARG (exp, 1);
6908
6909 if (!check_read_access (exp, arg1)
6910 || !check_read_access (exp, arg2))
6911 return NULL_RTX;
6912
6913 /* Due to the performance benefit, always inline the calls first. */
6914 rtx result = NULL_RTX;
6915 result = inline_expand_builtin_bytecmp (exp, target);
6916 if (result)
6917 return result;
6918
6919 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
6920 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6921 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
6922 return NULL_RTX;
6923
6924 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6925 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6926
6927 /* If we don't know the alignment of either argument, emit a library call. */
6928 if (arg1_align == 0 || arg2_align == 0)
6929 return NULL_RTX;
6930
6931 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
6932 arg1 = builtin_save_expr (arg1);
6933 arg2 = builtin_save_expr (arg2);
6934
6935 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
6936 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
6937
6938 /* Try to call cmpstrsi. */
6939 if (cmpstr_icode != CODE_FOR_nothing)
6940 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
6941 MIN (arg1_align, arg2_align));
6942
6943 /* Try to determine at least one length and call cmpstrnsi. */
6944 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6945 {
6946 tree len;
6947 rtx arg3_rtx;
6948
6949 tree len1 = c_strlen (arg1, 1);
6950 tree len2 = c_strlen (arg2, 1);
6951
6952 if (len1)
6953 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
6954 if (len2)
6955 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
6956
6957 /* If we don't have a constant length for the first, use the length
6958 of the second, if we know it. We don't require a constant for
6959 this case; some cost analysis could be done if both are available
6960 but neither is constant. For now, assume they're equally cheap,
6961 unless one has side effects. If both strings have constant lengths,
6962 use the smaller. */
6963
6964 if (!len1)
6965 len = len2;
6966 else if (!len2)
6967 len = len1;
6968 else if (TREE_SIDE_EFFECTS (len1))
6969 len = len2;
6970 else if (TREE_SIDE_EFFECTS (len2))
6971 len = len1;
6972 else if (TREE_CODE (len1) != INTEGER_CST)
6973 len = len2;
6974 else if (TREE_CODE (len2) != INTEGER_CST)
6975 len = len1;
6976 else if (tree_int_cst_lt (len1, len2))
6977 len = len1;
6978 else
6979 len = len2;
6980
6981 /* If both arguments have side effects, we cannot optimize. */
6982 if (len && !TREE_SIDE_EFFECTS (len))
6983 {
6984 arg3_rtx = expand_normal (len);
6985 result = expand_cmpstrn_or_cmpmem
6986 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
6987 arg3_rtx, MIN (arg1_align, arg2_align));
6988 }
6989 }
6990
6991 tree fndecl = get_callee_fndecl (exp);
6992 if (result)
6993 {
6994 /* Check to see if the argument was declared attribute nonstring
6995 and if so, issue a warning since at this point it's not known
6996 to be nul-terminated. */
6997 maybe_warn_nonstring_arg (fndecl, exp);
6998
6999 /* Return the value in the proper mode for this function. */
7000 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7001 if (GET_MODE (result) == mode)
7002 return result;
7003 if (target == 0)
7004 return convert_to_mode (mode, result, 0);
7005 convert_move (target, result, 0);
7006 return target;
7007 }
7008
7009 /* Expand the library call ourselves using a stabilized argument
7010 list to avoid re-evaluating the function's arguments twice. */
7011 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
7012 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7013 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
7014 return expand_call (fn, target, target == const0_rtx);
7015 }
7016
7017 /* Expand expression EXP, which is a call to the strncmp builtin. Return
7018 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
7019 try to get the result in TARGET, if convenient. */
7020
7021 static rtx
7022 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
7023 ATTRIBUTE_UNUSED machine_mode mode)
7024 {
7025 if (!validate_arglist (exp,
7026 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7027 return NULL_RTX;
7028
7029 tree arg1 = CALL_EXPR_ARG (exp, 0);
7030 tree arg2 = CALL_EXPR_ARG (exp, 1);
7031 tree arg3 = CALL_EXPR_ARG (exp, 2);
7032
7033 if (!check_nul_terminated_array (exp, arg1, arg3)
7034 || !check_nul_terminated_array (exp, arg2, arg3))
7035 return NULL_RTX;
7036
7037 location_t loc = tree_inlined_location (exp);
7038 tree len1 = c_strlen (arg1, 1);
7039 tree len2 = c_strlen (arg2, 1);
7040
7041 if (!len1 || !len2)
7042 {
7043 /* Check to see if the argument was declared attribute nonstring
7044 and if so, issue a warning since at this point it's not known
7045 to be nul-terminated. */
7046 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
7047 && !len1 && !len2)
7048 {
7049 /* A strncmp read is constrained not just by the bound but
7050 also by the length of the shorter string. Specifying
7051 a bound that's larger than the size of either array makes
7052 no sense and is likely a bug. When the length of neither
7053 of the two strings is known but the sizes of both of
7054 the arrays they are stored in are, issue a warning if
7055 the bound is larger than the size of the larger
7056 of the two arrays. */
7057
7058 access_ref ref1 (arg3, true);
7059 access_ref ref2 (arg3, true);
7060
7061 tree bndrng[2] = { NULL_TREE, NULL_TREE };
7062 get_size_range (arg3, bndrng, ref1.bndrng);
7063
7064 tree size1 = compute_objsize (arg1, 1, &ref1);
7065 tree size2 = compute_objsize (arg2, 1, &ref2);
7066 tree func = get_callee_fndecl (exp);
7067
7068 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
7069 {
7070 offset_int rem1 = ref1.size_remaining ();
7071 offset_int rem2 = ref2.size_remaining ();
7072 if (rem1 == 0 || rem2 == 0)
7073 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7074 bndrng, integer_zero_node);
7075 else
7076 {
7077 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
7078 if (maxrem < wi::to_offset (bndrng[0]))
7079 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
7080 func, bndrng,
7081 wide_int_to_tree (sizetype, maxrem));
7082 }
7083 }
7084 else if (bndrng[0]
7085 && !integer_zerop (bndrng[0])
7086 && ((size1 && integer_zerop (size1))
7087 || (size2 && integer_zerop (size2))))
7088 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7089 bndrng, integer_zero_node);
7090 }
7091 }
7092
7093 /* Due to the performance benefit, always inline the calls first. */
7094 rtx result = NULL_RTX;
7095 result = inline_expand_builtin_bytecmp (exp, target);
7096 if (result)
7097 return result;
7098
7099 /* If c_strlen can determine an expression for one of the string
7100 lengths, and it doesn't have side effects, then emit cmpstrnsi
7101 using length MIN(strlen(string)+1, arg3). */
7102 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7103 if (cmpstrn_icode == CODE_FOR_nothing)
7104 return NULL_RTX;
7105
7106 tree len;
7107
7108 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7109 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7110
7111 if (len1)
7112 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7113 if (len2)
7114 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7115
7116 tree len3 = fold_convert_loc (loc, sizetype, arg3);
7117
7118 /* If we don't have a constant length for the first, use the length
7119 of the second, if we know it. If neither string is constant length,
7120 use the given length argument. We don't require a constant for
7121 this case; some cost analysis could be done if both are available
7122 but neither is constant. For now, assume they're equally cheap,
7123 unless one has side effects. If both strings have constant lengths,
7124 use the smaller. */
7125
7126 if (!len1 && !len2)
7127 len = len3;
7128 else if (!len1)
7129 len = len2;
7130 else if (!len2)
7131 len = len1;
7132 else if (TREE_SIDE_EFFECTS (len1))
7133 len = len2;
7134 else if (TREE_SIDE_EFFECTS (len2))
7135 len = len1;
7136 else if (TREE_CODE (len1) != INTEGER_CST)
7137 len = len2;
7138 else if (TREE_CODE (len2) != INTEGER_CST)
7139 len = len1;
7140 else if (tree_int_cst_lt (len1, len2))
7141 len = len1;
7142 else
7143 len = len2;
7144
7145 /* If we are not using the given length, we must incorporate it here.
7146 The actual new length parameter will be MIN(len,arg3) in this case. */
7147 if (len != len3)
7148 {
7149 len = fold_convert_loc (loc, sizetype, len);
7150 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
7151 }
7152 rtx arg1_rtx = get_memory_rtx (arg1, len);
7153 rtx arg2_rtx = get_memory_rtx (arg2, len);
7154 rtx arg3_rtx = expand_normal (len);
7155 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
7156 arg2_rtx, TREE_TYPE (len), arg3_rtx,
7157 MIN (arg1_align, arg2_align));
7158
7159 tree fndecl = get_callee_fndecl (exp);
7160 if (result)
7161 {
7162 /* Return the value in the proper mode for this function. */
7163 mode = TYPE_MODE (TREE_TYPE (exp));
7164 if (GET_MODE (result) == mode)
7165 return result;
7166 if (target == 0)
7167 return convert_to_mode (mode, result, 0);
7168 convert_move (target, result, 0);
7169 return target;
7170 }
7171
7172 /* Expand the library call ourselves using a stabilized argument
7173 list to avoid re-evaluating the function's arguments twice. */
7174 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
7175 if (TREE_NO_WARNING (exp))
7176 TREE_NO_WARNING (call) = true;
7177 gcc_assert (TREE_CODE (call) == CALL_EXPR);
7178 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
7179 return expand_call (call, target, target == const0_rtx);
7180 }
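
/* Illustrative sketch (not part of this file): two consequences of the
   logic above, on hypothetical user code.

     #include <string.h>

     extern char a[4], b[8];    // contents not known at compile time

     int
     f (void)
     {
       // The bound 16 exceeds the size of both arrays, so -Wstringop-overread
       // diagnoses the call; at most 8 bytes (the larger array) could ever
       // be compared.
       return strncmp (a, b, 16);
     }

     int
     g (const char *s)
     {
       // One length is known: strlen ("abc") + 1 == 4, so the comparison is
       // emitted with the effective bound MIN (4, 10).
       return strncmp (s, "abc", 10);
     }
   */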
7181
7182 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
7183 if that's convenient. */
7184
7185 rtx
7186 expand_builtin_saveregs (void)
7187 {
7188 rtx val;
7189 rtx_insn *seq;
7190
7191 /* Don't do __builtin_saveregs more than once in a function.
7192 Save the result of the first call and reuse it. */
7193 if (saveregs_value != 0)
7194 return saveregs_value;
7195
7196 /* When this function is called, it means that registers must be
7197 saved on entry to this function. So we migrate the call to the
7198 first insn of this function. */
7199
7200 start_sequence ();
7201
7202 /* Do whatever the machine needs done in this case. */
7203 val = targetm.calls.expand_builtin_saveregs ();
7204
7205 seq = get_insns ();
7206 end_sequence ();
7207
7208 saveregs_value = val;
7209
7210 /* Put the insns after the NOTE that starts the function. If this
7211 is inside a start_sequence, make the outer-level insn chain current, so
7212 the code is placed at the start of the function. */
7213 push_topmost_sequence ();
7214 emit_insn_after (seq, entry_of_function ());
7215 pop_topmost_sequence ();
7216
7217 return val;
7218 }
7219
7220 /* Expand a call to __builtin_next_arg. */
7221
7222 static rtx
7223 expand_builtin_next_arg (void)
7224 {
7225 /* Checking arguments is already done in fold_builtin_next_arg
7226 that must be called before this function. */
7227 return expand_binop (ptr_mode, add_optab,
7228 crtl->args.internal_arg_pointer,
7229 crtl->args.arg_offset_rtx,
7230 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7231 }
7232
7233 /* Make it easier for the backends by protecting the valist argument
7234 from multiple evaluations. */
7235
7236 static tree
7237 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
7238 {
7239 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
7240
7241 /* The current way of determining the type of valist is completely
7242 bogus. We should have the information on the va builtin instead. */
7243 if (!vatype)
7244 vatype = targetm.fn_abi_va_list (cfun->decl);
7245
7246 if (TREE_CODE (vatype) == ARRAY_TYPE)
7247 {
7248 if (TREE_SIDE_EFFECTS (valist))
7249 valist = save_expr (valist);
7250
7251 /* For this case, the backends will be expecting a pointer to
7252 vatype, but it's possible we've actually been given an array
7253 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
7254 So fix it. */
7255 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
7256 {
7257 tree p1 = build_pointer_type (TREE_TYPE (vatype));
7258 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
7259 }
7260 }
7261 else
7262 {
7263 tree pt = build_pointer_type (vatype);
7264
7265 if (! needs_lvalue)
7266 {
7267 if (! TREE_SIDE_EFFECTS (valist))
7268 return valist;
7269
7270 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
7271 TREE_SIDE_EFFECTS (valist) = 1;
7272 }
7273
7274 if (TREE_SIDE_EFFECTS (valist))
7275 valist = save_expr (valist);
7276 valist = fold_build2_loc (loc, MEM_REF,
7277 vatype, valist, build_int_cst (pt, 0));
7278 }
7279
7280 return valist;
7281 }
7282
7283 /* The "standard" definition of va_list is void*. */
7284
7285 tree
7286 std_build_builtin_va_list (void)
7287 {
7288 return ptr_type_node;
7289 }
7290
7291 /* The "standard" abi va_list is va_list_type_node. */
7292
7293 tree
7294 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
7295 {
7296 return va_list_type_node;
7297 }
7298
7299 /* The "standard" type of va_list is va_list_type_node. */
7300
7301 tree
7302 std_canonical_va_list_type (tree type)
7303 {
7304 tree wtype, htype;
7305
7306 wtype = va_list_type_node;
7307 htype = type;
7308
7309 if (TREE_CODE (wtype) == ARRAY_TYPE)
7310 {
7311 /* If va_list is an array type, the argument may have decayed
7312 to a pointer type, e.g. by being passed to another function.
7313 In that case, unwrap both types so that we can compare the
7314 underlying records. */
7315 if (TREE_CODE (htype) == ARRAY_TYPE
7316 || POINTER_TYPE_P (htype))
7317 {
7318 wtype = TREE_TYPE (wtype);
7319 htype = TREE_TYPE (htype);
7320 }
7321 }
7322 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7323 return va_list_type_node;
7324
7325 return NULL_TREE;
7326 }
7327
7328 /* The "standard" implementation of va_start: just assign `nextarg' to
7329 the variable. */
7330
7331 void
7332 std_expand_builtin_va_start (tree valist, rtx nextarg)
7333 {
7334 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
7335 convert_move (va_r, nextarg, 0);
7336 }
7337
7338 /* Expand EXP, a call to __builtin_va_start. */
7339
7340 static rtx
7341 expand_builtin_va_start (tree exp)
7342 {
7343 rtx nextarg;
7344 tree valist;
7345 location_t loc = EXPR_LOCATION (exp);
7346
7347 if (call_expr_nargs (exp) < 2)
7348 {
7349 error_at (loc, "too few arguments to function %<va_start%>");
7350 return const0_rtx;
7351 }
7352
7353 if (fold_builtin_next_arg (exp, true))
7354 return const0_rtx;
7355
7356 nextarg = expand_builtin_next_arg ();
7357 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
7358
7359 if (targetm.expand_builtin_va_start)
7360 targetm.expand_builtin_va_start (valist, nextarg);
7361 else
7362 std_expand_builtin_va_start (valist, nextarg);
7363
7364 return const0_rtx;
7365 }
7366
7367 /* Expand EXP, a call to __builtin_va_end. */
7368
7369 static rtx
7370 expand_builtin_va_end (tree exp)
7371 {
7372 tree valist = CALL_EXPR_ARG (exp, 0);
7373
7374 /* Evaluate for side effects, if needed. I hate macros that don't
7375 do that. */
7376 if (TREE_SIDE_EFFECTS (valist))
7377 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
7378
7379 return const0_rtx;
7380 }
7381
7382 /* Expand EXP, a call to __builtin_va_copy. We do this as a
7383 builtin rather than just as an assignment in stdarg.h because of the
7384 nastiness of array-type va_list types. */
7385
7386 static rtx
7387 expand_builtin_va_copy (tree exp)
7388 {
7389 tree dst, src, t;
7390 location_t loc = EXPR_LOCATION (exp);
7391
7392 dst = CALL_EXPR_ARG (exp, 0);
7393 src = CALL_EXPR_ARG (exp, 1);
7394
7395 dst = stabilize_va_list_loc (loc, dst, 1);
7396 src = stabilize_va_list_loc (loc, src, 0);
7397
7398 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
7399
7400 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
7401 {
7402 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
7403 TREE_SIDE_EFFECTS (t) = 1;
7404 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7405 }
7406 else
7407 {
7408 rtx dstb, srcb, size;
7409
7410 /* Evaluate to pointers. */
7411 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
7412 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
7413 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
7414 NULL_RTX, VOIDmode, EXPAND_NORMAL);
7415
7416 dstb = convert_memory_address (Pmode, dstb);
7417 srcb = convert_memory_address (Pmode, srcb);
7418
7419 /* "Dereference" to BLKmode memories. */
7420 dstb = gen_rtx_MEM (BLKmode, dstb);
7421 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
7422 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7423 srcb = gen_rtx_MEM (BLKmode, srcb);
7424 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
7425 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7426
7427 /* Copy. */
7428 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
7429 }
7430
7431 return const0_rtx;
7432 }
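
/* Illustrative sketch (not part of this file): why va_copy is expanded as a
   builtin.  On targets whose ABI va_list is an array of a record (x86-64,
   for instance) the copy above must be a block move of the whole record,
   whereas a pointer-style va_list only needs a simple assignment.

     #include <stdarg.h>

     int
     sum_twice (int n, ...)
     {
       va_list ap, ap2;
       int s = 0;
       va_start (ap, n);
       va_copy (ap2, ap);   // expanded here: assignment or emit_block_move
       for (int i = 0; i < n; i++)
         s += va_arg (ap, int);
       for (int i = 0; i < n; i++)
         s += va_arg (ap2, int);
       va_end (ap2);
       va_end (ap);
       return s;
     }
   */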
7433
7434 /* Expand a call to one of the builtin functions __builtin_frame_address or
7435 __builtin_return_address. */
7436
7437 static rtx
7438 expand_builtin_frame_address (tree fndecl, tree exp)
7439 {
7440 /* The argument must be a nonnegative integer constant.
7441 It counts the number of frames to scan up the stack.
7442 The value is either the frame pointer value or the return
7443 address saved in that frame. */
7444 if (call_expr_nargs (exp) == 0)
7445 /* Warning about missing arg was already issued. */
7446 return const0_rtx;
7447 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
7448 {
7449 error ("invalid argument to %qD", fndecl);
7450 return const0_rtx;
7451 }
7452 else
7453 {
7454 /* Number of frames to scan up the stack. */
7455 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
7456
7457 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
7458
7459 /* Some ports cannot access arbitrary stack frames. */
7460 if (tem == NULL)
7461 {
7462 warning (0, "unsupported argument to %qD", fndecl);
7463 return const0_rtx;
7464 }
7465
7466 if (count)
7467 {
7468 /* Warn since no effort is made to ensure that any frame
7469 beyond the current one exists or can be safely reached. */
7470 warning (OPT_Wframe_address, "calling %qD with "
7471 "a nonzero argument is unsafe", fndecl);
7472 }
7473
7474 /* For __builtin_frame_address, return what we've got. */
7475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7476 return tem;
7477
7478 if (!REG_P (tem)
7479 && ! CONSTANT_P (tem))
7480 tem = copy_addr_to_reg (tem);
7481 return tem;
7482 }
7483 }
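
/* Illustrative sketch (not part of this file): typical uses of the two
   builtins handled above.

     void *
     my_frame (void)
     {
       // Count 0 is always supported: the current frame pointer value.
       return __builtin_frame_address (0);
     }

     void *
     my_caller_pc (void)
     {
       // A nonzero count triggers -Wframe-address, since nothing
       // guarantees that the requested outer frame exists or can be
       // reached safely on the current target.
       return __builtin_return_address (1);
     }
   */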
7484
7485 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
7486 failed and the caller should emit a normal call. */
7487
7488 static rtx
7489 expand_builtin_alloca (tree exp)
7490 {
7491 rtx op0;
7492 rtx result;
7493 unsigned int align;
7494 tree fndecl = get_callee_fndecl (exp);
7495 HOST_WIDE_INT max_size;
7496 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7497 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
7498 bool valid_arglist
7499 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7500 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
7501 VOID_TYPE)
7502 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
7503 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
7504 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
7505
7506 if (!valid_arglist)
7507 return NULL_RTX;
7508
7509 if ((alloca_for_var
7510 && warn_vla_limit >= HOST_WIDE_INT_MAX
7511 && warn_alloc_size_limit < warn_vla_limit)
7512 || (!alloca_for_var
7513 && warn_alloca_limit >= HOST_WIDE_INT_MAX
7514 && warn_alloc_size_limit < warn_alloca_limit
7515 ))
7516 {
7517 /* -Walloca-larger-than and -Wvla-larger-than settings of
7518 less than HOST_WIDE_INT_MAX override the more general
7519 -Walloc-size-larger-than so unless either of the former
7520 options is smaller than the last one (which would imply
7521 that the call was already checked), check the alloca
7522 arguments for overflow. */
7523 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
7524 int idx[] = { 0, -1 };
7525 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
7526 }
7527
7528 /* Compute the argument. */
7529 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
7530
7531 /* Compute the alignment. */
7532 align = (fcode == BUILT_IN_ALLOCA
7533 ? BIGGEST_ALIGNMENT
7534 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
7535
7536 /* Compute the maximum size. */
7537 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7538 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
7539 : -1);
7540
7541 /* Allocate the desired space. If the allocation stems from the declaration
7542 of a variable-sized object, it cannot accumulate. */
7543 result
7544 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
7545 result = convert_memory_address (ptr_mode, result);
7546
7547 /* Dynamic allocations for variables are recorded during gimplification. */
7548 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
7549 record_dynamic_alloc (exp);
7550
7551 return result;
7552 }
7553
7554 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
7555 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
7556 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment for
7557 the handle_builtin_stack_restore function. */
7558
7559 static rtx
7560 expand_asan_emit_allocas_unpoison (tree exp)
7561 {
7562 tree arg0 = CALL_EXPR_ARG (exp, 0);
7563 tree arg1 = CALL_EXPR_ARG (exp, 1);
7564 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7565 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7566 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
7567 stack_pointer_rtx, NULL_RTX, 0,
7568 OPTAB_LIB_WIDEN);
7569 off = convert_modes (ptr_mode, Pmode, off, 0);
7570 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
7571 OPTAB_LIB_WIDEN);
7572 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
7573 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
7574 top, ptr_mode, bot, ptr_mode);
7575 return ret;
7576 }
7577
7578 /* Expand a call to bswap builtin in EXP.
7579 Return NULL_RTX if a normal call should be emitted rather than expanding the
7580 function in-line. If convenient, the result should be placed in TARGET.
7581 SUBTARGET may be used as the target for computing one of EXP's operands. */
7582
7583 static rtx
7584 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
7585 rtx subtarget)
7586 {
7587 tree arg;
7588 rtx op0;
7589
7590 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7591 return NULL_RTX;
7592
7593 arg = CALL_EXPR_ARG (exp, 0);
7594 op0 = expand_expr (arg,
7595 subtarget && GET_MODE (subtarget) == target_mode
7596 ? subtarget : NULL_RTX,
7597 target_mode, EXPAND_NORMAL);
7598 if (GET_MODE (op0) != target_mode)
7599 op0 = convert_to_mode (target_mode, op0, 1);
7600
7601 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
7602
7603 gcc_assert (target);
7604
7605 return convert_to_mode (target_mode, target, 1);
7606 }
7607
7608 /* Expand a call to a unary builtin in EXP.
7609 Return NULL_RTX if a normal call should be emitted rather than expanding the
7610 function in-line. If convenient, the result should be placed in TARGET.
7611 SUBTARGET may be used as the target for computing one of EXP's operands. */
7612
7613 static rtx
7614 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
7615 rtx subtarget, optab op_optab)
7616 {
7617 rtx op0;
7618
7619 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7620 return NULL_RTX;
7621
7622 /* Compute the argument. */
7623 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
7624 (subtarget
7625 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
7626 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
7627 VOIDmode, EXPAND_NORMAL);
7628 /* Compute op, into TARGET if possible.
7629 Set TARGET to wherever the result comes back. */
7630 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
7631 op_optab, op0, target, op_optab != clrsb_optab);
7632 gcc_assert (target);
7633
7634 return convert_to_mode (target_mode, target, 0);
7635 }
7636
7637 /* Expand a call to __builtin_expect. We just return our argument
7638 as the builtin_expect semantics should already have been applied by
7639 the tree branch prediction pass. */
7640
7641 static rtx
7642 expand_builtin_expect (tree exp, rtx target)
7643 {
7644 tree arg;
7645
7646 if (call_expr_nargs (exp) < 2)
7647 return const0_rtx;
7648 arg = CALL_EXPR_ARG (exp, 0);
7649
7650 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7651 /* When guessing was done, the hints should be already stripped away. */
7652 gcc_assert (!flag_guess_branch_prob
7653 || optimize == 0 || seen_error ());
7654 return target;
7655 }
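
/* Illustrative sketch (not part of this file): by the time expansion runs,
   the hint below has already been consumed by the tree-level branch
   prediction pass, so only the first argument is expanded and returned.

     int
     process (int err)
     {
       if (__builtin_expect (err != 0, 0))   // annotate the branch as unlikely
         return -1;
       return 0;
     }
   */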
7656
7657 /* Expand a call to __builtin_expect_with_probability. We just return our
7658 argument as the builtin_expect semantics should already have been applied by
7659 the tree branch prediction pass. */
7660
7661 static rtx
7662 expand_builtin_expect_with_probability (tree exp, rtx target)
7663 {
7664 tree arg;
7665
7666 if (call_expr_nargs (exp) < 3)
7667 return const0_rtx;
7668 arg = CALL_EXPR_ARG (exp, 0);
7669
7670 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7671 /* When guessing was done, the hints should be already stripped away. */
7672 gcc_assert (!flag_guess_branch_prob
7673 || optimize == 0 || seen_error ());
7674 return target;
7675 }
7676
7677
7678 /* Expand a call to __builtin_assume_aligned. We just return our first
7679 argument as the builtin_assume_aligned semantics should already have been
7680 applied by CCP. */
7681
7682 static rtx
7683 expand_builtin_assume_aligned (tree exp, rtx target)
7684 {
7685 if (call_expr_nargs (exp) < 2)
7686 return const0_rtx;
7687 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
7688 EXPAND_NORMAL);
7689 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
7690 && (call_expr_nargs (exp) < 3
7691 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
7692 return target;
7693 }
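
/* Illustrative sketch (not part of this file): __builtin_assume_aligned has
   already been folded by CCP, so only the pointer argument is expanded here;
   the alignment (and optional misalignment) arguments must merely be free of
   side effects.

     #include <stdint.h>

     int64_t
     load_first (const void *p)
     {
       // Promise that P is 16-byte aligned; the builtin returns P unchanged.
       const int64_t *q = __builtin_assume_aligned (p, 16);
       return q[0];
     }
   */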
7694
7695 void
7696 expand_builtin_trap (void)
7697 {
7698 if (targetm.have_trap ())
7699 {
7700 rtx_insn *insn = emit_insn (targetm.gen_trap ());
7701 /* For trap insns when not accumulating outgoing args force
7702 REG_ARGS_SIZE note to prevent crossjumping of calls with
7703 different args sizes. */
7704 if (!ACCUMULATE_OUTGOING_ARGS)
7705 add_args_size_note (insn, stack_pointer_delta);
7706 }
7707 else
7708 {
7709 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
7710 tree call_expr = build_call_expr (fn, 0);
7711 expand_call (call_expr, NULL_RTX, false);
7712 }
7713
7714 emit_barrier ();
7715 }
7716
7717 /* Expand a call to __builtin_unreachable. We do nothing except emit
7718 a barrier saying that control flow will not pass here.
7719
7720 It is the responsibility of the program being compiled to ensure
7721 that control flow never reaches __builtin_unreachable. */
7722 static void
7723 expand_builtin_unreachable (void)
7724 {
7725 emit_barrier ();
7726 }
7727
7728 /* Expand EXP, a call to fabs, fabsf or fabsl.
7729 Return NULL_RTX if a normal call should be emitted rather than expanding
7730 the function inline. If convenient, the result should be placed
7731 in TARGET. SUBTARGET may be used as the target for computing
7732 the operand. */
7733
7734 static rtx
7735 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7736 {
7737 machine_mode mode;
7738 tree arg;
7739 rtx op0;
7740
7741 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7742 return NULL_RTX;
7743
7744 arg = CALL_EXPR_ARG (exp, 0);
7745 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7746 mode = TYPE_MODE (TREE_TYPE (arg));
7747 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7748 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7749 }
7750
7751 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7752 Return NULL if a normal call should be emitted rather than expanding the
7753 function inline. If convenient, the result should be placed in TARGET.
7754 SUBTARGET may be used as the target for computing the operand. */
7755
7756 static rtx
7757 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7758 {
7759 rtx op0, op1;
7760 tree arg;
7761
7762 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7763 return NULL_RTX;
7764
7765 arg = CALL_EXPR_ARG (exp, 0);
7766 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7767
7768 arg = CALL_EXPR_ARG (exp, 1);
7769 op1 = expand_normal (arg);
7770
7771 return expand_copysign (op0, op1, target);
7772 }
7773
7774 /* Emit a call to __builtin___clear_cache. */
7775
7776 void
7777 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
7778 {
7779 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
7780 BUILTIN_ASM_NAME_PTR
7781 (BUILT_IN_CLEAR_CACHE));
7782
7783 emit_library_call (callee,
7784 LCT_NORMAL, VOIDmode,
7785 convert_memory_address (ptr_mode, begin), ptr_mode,
7786 convert_memory_address (ptr_mode, end), ptr_mode);
7787 }
7788
7789 /* Emit a call to __builtin___clear_cache, unless the target specifies
7790 it as do-nothing. This function can be used by trampoline
7791 finalizers to duplicate the effects of expanding a call to the
7792 clear_cache builtin. */
7793
7794 void
7795 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
7796 {
7797 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
7798 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
7799 {
7800 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7801 return;
7802 }
7803
7804 if (targetm.have_clear_cache ())
7805 {
7806 /* We have a "clear_cache" insn, and it will handle everything. */
7807 class expand_operand ops[2];
7808
7809 create_address_operand (&ops[0], begin);
7810 create_address_operand (&ops[1], end);
7811
7812 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
7813 return;
7814 }
7815 else
7816 {
7817 #ifndef CLEAR_INSN_CACHE
7818 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7819 does nothing. There is no need to call it. Do nothing. */
7820 return;
7821 #endif /* CLEAR_INSN_CACHE */
7822 }
7823
7824 targetm.calls.emit_call_builtin___clear_cache (begin, end);
7825 }
7826
7827 /* Expand a call to __builtin___clear_cache. */
7828
7829 static void
7830 expand_builtin___clear_cache (tree exp)
7831 {
7832 tree begin, end;
7833 rtx begin_rtx, end_rtx;
7834
7835 /* We must not expand to a library call. If we did, any
7836 fallback library function in libgcc that might contain a call to
7837 __builtin___clear_cache() would recurse infinitely. */
7838 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7839 {
7840 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7841 return;
7842 }
7843
7844 begin = CALL_EXPR_ARG (exp, 0);
7845 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
7846
7847 end = CALL_EXPR_ARG (exp, 1);
7848 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
7849
7850 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
7851 }
7852
7853 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7854
7855 static rtx
7856 round_trampoline_addr (rtx tramp)
7857 {
7858 rtx temp, addend, mask;
7859
7860 /* If we don't need too much alignment, we'll have been guaranteed
7861 proper alignment by get_trampoline_type. */
7862 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7863 return tramp;
7864
7865 /* Round address up to desired boundary. */
7866 temp = gen_reg_rtx (Pmode);
7867 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7868 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
7869
7870 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7871 temp, 0, OPTAB_LIB_WIDEN);
7872 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7873 temp, 0, OPTAB_LIB_WIDEN);
7874
7875 return tramp;
7876 }
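
/* Illustrative sketch (not part of this file): the two binops emitted above
   are the usual round-up-to-a-power-of-two idiom, with the alignment
   expressed in bytes:

     #include <stdint.h>

     static inline uintptr_t
     round_up_to (uintptr_t addr, uintptr_t align)  // align is a power of two
     {
       return (addr + align - 1) & -align;
     }

   so, assuming a 64-byte trampoline alignment, round_up_to (0x1009, 64)
   yields 0x1040.  */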
7877
7878 static rtx
7879 expand_builtin_init_trampoline (tree exp, bool onstack)
7880 {
7881 tree t_tramp, t_func, t_chain;
7882 rtx m_tramp, r_tramp, r_chain, tmp;
7883
7884 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
7885 POINTER_TYPE, VOID_TYPE))
7886 return NULL_RTX;
7887
7888 t_tramp = CALL_EXPR_ARG (exp, 0);
7889 t_func = CALL_EXPR_ARG (exp, 1);
7890 t_chain = CALL_EXPR_ARG (exp, 2);
7891
7892 r_tramp = expand_normal (t_tramp);
7893 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
7894 MEM_NOTRAP_P (m_tramp) = 1;
7895
7896 /* If ONSTACK, the TRAMP argument should be the address of a field
7897 within the local function's FRAME decl. Either way, let's see if
7898 we can fill in the MEM_ATTRs for this memory. */
7899 if (TREE_CODE (t_tramp) == ADDR_EXPR)
7900 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
7901
7902 /* Creator of a heap trampoline is responsible for making sure the
7903 address is aligned to at least STACK_BOUNDARY. Normally malloc
7904 will ensure this anyhow. */
7905 tmp = round_trampoline_addr (r_tramp);
7906 if (tmp != r_tramp)
7907 {
7908 m_tramp = change_address (m_tramp, BLKmode, tmp);
7909 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
7910 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
7911 }
7912
7913 /* The FUNC argument should be the address of the nested function.
7914 Extract the actual function decl to pass to the hook. */
7915 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
7916 t_func = TREE_OPERAND (t_func, 0);
7917 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
7918
7919 r_chain = expand_normal (t_chain);
7920
7921 /* Generate insns to initialize the trampoline. */
7922 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
7923
7924 if (onstack)
7925 {
7926 trampolines_created = 1;
7927
7928 if (targetm.calls.custom_function_descriptors != 0)
7929 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
7930 "trampoline generated for nested function %qD", t_func);
7931 }
7932
7933 return const0_rtx;
7934 }
7935
7936 static rtx
7937 expand_builtin_adjust_trampoline (tree exp)
7938 {
7939 rtx tramp;
7940
7941 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7942 return NULL_RTX;
7943
7944 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7945 tramp = round_trampoline_addr (tramp);
7946 if (targetm.calls.trampoline_adjust_address)
7947 tramp = targetm.calls.trampoline_adjust_address (tramp);
7948
7949 return tramp;
7950 }
7951
7952 /* Expand a call to the builtin descriptor initialization routine.
7953 A descriptor is made up of a couple of pointers to the static
7954 chain and the code entry in this order. */
7955
7956 static rtx
7957 expand_builtin_init_descriptor (tree exp)
7958 {
7959 tree t_descr, t_func, t_chain;
7960 rtx m_descr, r_descr, r_func, r_chain;
7961
7962 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
7963 VOID_TYPE))
7964 return NULL_RTX;
7965
7966 t_descr = CALL_EXPR_ARG (exp, 0);
7967 t_func = CALL_EXPR_ARG (exp, 1);
7968 t_chain = CALL_EXPR_ARG (exp, 2);
7969
7970 r_descr = expand_normal (t_descr);
7971 m_descr = gen_rtx_MEM (BLKmode, r_descr);
7972 MEM_NOTRAP_P (m_descr) = 1;
7973 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
7974
7975 r_func = expand_normal (t_func);
7976 r_chain = expand_normal (t_chain);
7977
7978 /* Generate insns to initialize the descriptor. */
7979 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
7980 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
7981 POINTER_SIZE / BITS_PER_UNIT), r_func);
7982
7983 return const0_rtx;
7984 }
7985
7986 /* Expand a call to the builtin descriptor adjustment routine. */
7987
7988 static rtx
7989 expand_builtin_adjust_descriptor (tree exp)
7990 {
7991 rtx tramp;
7992
7993 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7994 return NULL_RTX;
7995
7996 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7997
7998 /* Unalign the descriptor to allow runtime identification. */
7999 tramp = plus_constant (ptr_mode, tramp,
8000 targetm.calls.custom_function_descriptors);
8001
8002 return force_operand (tramp, NULL_RTX);
8003 }
8004
8005 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
8006 function. The function first checks whether the back end provides
8007 an insn to implement signbit for the respective mode. If not, it
8008 checks whether the floating point format of the value is such that
8009 the sign bit can be extracted. If that is not the case, error out.
8010 EXP is the expression that is a call to the builtin function; if
8011 convenient, the result should be placed in TARGET. */
8012 static rtx
8013 expand_builtin_signbit (tree exp, rtx target)
8014 {
8015 const struct real_format *fmt;
8016 scalar_float_mode fmode;
8017 scalar_int_mode rmode, imode;
8018 tree arg;
8019 int word, bitpos;
8020 enum insn_code icode;
8021 rtx temp;
8022 location_t loc = EXPR_LOCATION (exp);
8023
8024 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8025 return NULL_RTX;
8026
8027 arg = CALL_EXPR_ARG (exp, 0);
8028 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
8029 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
8030 fmt = REAL_MODE_FORMAT (fmode);
8031
8032 arg = builtin_save_expr (arg);
8033
8034 /* Expand the argument yielding a RTX expression. */
8035 temp = expand_normal (arg);
8036
8037 /* Check if the back end provides an insn that handles signbit for the
8038 argument's mode. */
8039 icode = optab_handler (signbit_optab, fmode);
8040 if (icode != CODE_FOR_nothing)
8041 {
8042 rtx_insn *last = get_last_insn ();
8043 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8044 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
8045 return target;
8046 delete_insns_since (last);
8047 }
8048
8049 /* For floating point formats without a sign bit, implement signbit
8050 as "ARG < 0.0". */
8051 bitpos = fmt->signbit_ro;
8052 if (bitpos < 0)
8053 {
8054 /* But we can't do this if the format supports signed zero. */
8055 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
8056
8057 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
8058 build_real (TREE_TYPE (arg), dconst0));
8059 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8060 }
8061
8062 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
8063 {
8064 imode = int_mode_for_mode (fmode).require ();
8065 temp = gen_lowpart (imode, temp);
8066 }
8067 else
8068 {
8069 imode = word_mode;
8070 /* Handle targets with different FP word orders. */
8071 if (FLOAT_WORDS_BIG_ENDIAN)
8072 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
8073 else
8074 word = bitpos / BITS_PER_WORD;
8075 temp = operand_subword_force (temp, word, fmode);
8076 bitpos = bitpos % BITS_PER_WORD;
8077 }
8078
8079 /* Force the intermediate word_mode (or narrower) result into a
8080 register. This avoids attempting to create paradoxical SUBREGs
8081 of floating point modes below. */
8082 temp = force_reg (imode, temp);
8083
8084 /* If the bitpos is within the "result mode" lowpart, the operation
8085 can be implemented with a single bitwise AND. Otherwise, we need
8086 a right shift and an AND. */
8087
8088 if (bitpos < GET_MODE_BITSIZE (rmode))
8089 {
8090 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
8091
8092 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
8093 temp = gen_lowpart (rmode, temp);
8094 temp = expand_binop (rmode, and_optab, temp,
8095 immed_wide_int_const (mask, rmode),
8096 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8097 }
8098 else
8099 {
8100 /* Perform a logical right shift to place the signbit in the least
8101 significant bit, then truncate the result to the desired mode
8102 and mask just this bit. */
8103 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
8104 temp = gen_lowpart (rmode, temp);
8105 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
8106 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8107 }
8108
8109 return temp;
8110 }
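
/* Illustrative sketch (not part of this file): what the shift-and-mask
   fallback above computes, written out for IEEE binary64, whose sign
   occupies bit 63 of the representation.

     #include <stdint.h>
     #include <string.h>

     int
     my_signbit (double x)
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);   // reinterpret the representation
       return (int) (bits >> 63) & 1;     // logical shift, then mask bit 0
     }

   When the sign bit already falls within the lowpart of the result mode,
   the shift is unnecessary and a single AND with a one-bit mask suffices.  */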
8111
8112 /* Expand fork or exec calls. TARGET is the desired target of the
8113 call. EXP is the call. FN is the
8114 identifier of the actual function. IGNORE is nonzero if the
8115 value is to be ignored. */
8116
8117 static rtx
8118 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
8119 {
8120 tree id, decl;
8121 tree call;
8122
8123 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8124 {
8125 tree path = CALL_EXPR_ARG (exp, 0);
8126 /* Detect unterminated path. */
8127 if (!check_read_access (exp, path))
8128 return NULL_RTX;
8129
8130 /* Also detect unterminated first argument. */
8131 switch (DECL_FUNCTION_CODE (fn))
8132 {
8133 case BUILT_IN_EXECL:
8134 case BUILT_IN_EXECLE:
8135 case BUILT_IN_EXECLP:
8136 if (!check_read_access (exp, path))
8137 return NULL_RTX;
8138 default:
8139 break;
8140 }
8141 }
8142
8143
8144 /* If we are not profiling, just call the function. */
8145 if (!profile_arc_flag)
8146 return NULL_RTX;
8147
8148 /* Otherwise call the wrapper. This should be equivalent for the rest of
8149 compiler, so the code does not diverge, and the wrapper may run the
8150 code necessary for keeping the profiling sane. */
8151
8152 switch (DECL_FUNCTION_CODE (fn))
8153 {
8154 case BUILT_IN_FORK:
8155 id = get_identifier ("__gcov_fork");
8156 break;
8157
8158 case BUILT_IN_EXECL:
8159 id = get_identifier ("__gcov_execl");
8160 break;
8161
8162 case BUILT_IN_EXECV:
8163 id = get_identifier ("__gcov_execv");
8164 break;
8165
8166 case BUILT_IN_EXECLP:
8167 id = get_identifier ("__gcov_execlp");
8168 break;
8169
8170 case BUILT_IN_EXECLE:
8171 id = get_identifier ("__gcov_execle");
8172 break;
8173
8174 case BUILT_IN_EXECVP:
8175 id = get_identifier ("__gcov_execvp");
8176 break;
8177
8178 case BUILT_IN_EXECVE:
8179 id = get_identifier ("__gcov_execve");
8180 break;
8181
8182 default:
8183 gcc_unreachable ();
8184 }
8185
8186 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8187 FUNCTION_DECL, id, TREE_TYPE (fn));
8188 DECL_EXTERNAL (decl) = 1;
8189 TREE_PUBLIC (decl) = 1;
8190 DECL_ARTIFICIAL (decl) = 1;
8191 TREE_NOTHROW (decl) = 1;
8192 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8193 DECL_VISIBILITY_SPECIFIED (decl) = 1;
8194 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
8195 return expand_call (call, target, ignore);
8196 }
8197
8198
8199 \f
8200 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
8201 the pointer in these functions is void*, the tree optimizers may remove
8202 casts. The mode computed in expand_builtin isn't reliable either, due
8203 to __sync_bool_compare_and_swap.
8204
8205 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
8206 group of builtins. This gives us log2 of the mode size. */
8207
8208 static inline machine_mode
8209 get_builtin_sync_mode (int fcode_diff)
8210 {
8211 /* The size is not negotiable, so ask not to get BLKmode in return
8212 if the target indicates that a smaller size would be better. */
8213 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
8214 }
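
/* Illustrative worked example (not part of this file): FCODE_DIFF is the
   log2 of the access size in bytes, so e.g.
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2
   and BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */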
8215
8216 /* Expand the memory expression LOC and return the appropriate memory operand
8217 for the builtin_sync operations. */
8218
8219 static rtx
8220 get_builtin_sync_mem (tree loc, machine_mode mode)
8221 {
8222 rtx addr, mem;
8223 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8224 ? TREE_TYPE (TREE_TYPE (loc))
8225 : TREE_TYPE (loc));
8226 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
8227
8228 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
8229 addr = convert_memory_address (addr_mode, addr);
8230
8231 /* Note that we explicitly do not want any alias information for this
8232 memory, so that we kill all other live memories. Otherwise we don't
8233 satisfy the full barrier semantics of the intrinsic. */
8234 mem = gen_rtx_MEM (mode, addr);
8235
8236 set_mem_addr_space (mem, addr_space);
8237
8238 mem = validize_mem (mem);
8239
8240 /* The alignment needs to be at least that of the mode. */
8241 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
8242 get_pointer_alignment (loc)));
8243 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
8244 MEM_VOLATILE_P (mem) = 1;
8245
8246 return mem;
8247 }
8248
8249 /* Make sure an argument is in the right mode.
8250 EXP is the tree argument.
8251 MODE is the mode it should be in. */
8252
8253 static rtx
8254 expand_expr_force_mode (tree exp, machine_mode mode)
8255 {
8256 rtx val;
8257 machine_mode old_mode;
8258
8259 if (TREE_CODE (exp) == SSA_NAME
8260 && TYPE_MODE (TREE_TYPE (exp)) != mode)
8261 {
8262 /* Undo argument promotion if possible, as combine might not
8263 be able to do it later due to MEM_VOLATILE_P uses in the
8264 patterns. */
8265 gimple *g = get_gimple_for_ssa_name (exp);
8266 if (g && gimple_assign_cast_p (g))
8267 {
8268 tree rhs = gimple_assign_rhs1 (g);
8269 tree_code code = gimple_assign_rhs_code (g);
8270 if (CONVERT_EXPR_CODE_P (code)
8271 && TYPE_MODE (TREE_TYPE (rhs)) == mode
8272 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
8273 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
8274 && (TYPE_PRECISION (TREE_TYPE (exp))
8275 > TYPE_PRECISION (TREE_TYPE (rhs))))
8276 exp = rhs;
8277 }
8278 }
8279
8280 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
8281 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
8282 of CONST_INTs, where we know the old_mode only from the call argument. */
8283
8284 old_mode = GET_MODE (val);
8285 if (old_mode == VOIDmode)
8286 old_mode = TYPE_MODE (TREE_TYPE (exp));
8287 val = convert_modes (mode, old_mode, val, 1);
8288 return val;
8289 }
8290
8291
8292 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
8293 EXP is the CALL_EXPR. CODE is the rtx code
8294 that corresponds to the arithmetic or logical operation from the name;
8295 an exception here is that NOT actually means NAND. TARGET is an optional
8296 place for us to store the results; AFTER is true if this is the
8297 fetch_and_xxx form. */
8298
8299 static rtx
8300 expand_builtin_sync_operation (machine_mode mode, tree exp,
8301 enum rtx_code code, bool after,
8302 rtx target)
8303 {
8304 rtx val, mem;
8305 location_t loc = EXPR_LOCATION (exp);
8306
8307 if (code == NOT && warn_sync_nand)
8308 {
8309 tree fndecl = get_callee_fndecl (exp);
8310 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8311
8312 static bool warned_f_a_n, warned_n_a_f;
8313
8314 switch (fcode)
8315 {
8316 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8317 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8318 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8319 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8320 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8321 if (warned_f_a_n)
8322 break;
8323
8324 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
8325 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8326 warned_f_a_n = true;
8327 break;
8328
8329 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8330 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8331 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8332 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8333 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8334 if (warned_n_a_f)
8335 break;
8336
8337 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
8338 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8339 warned_n_a_f = true;
8340 break;
8341
8342 default:
8343 gcc_unreachable ();
8344 }
8345 }
8346
8347 /* Expand the operands. */
8348 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8349 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8350
8351 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
8352 after);
8353 }
8354
8355 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
8356 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
8357 true if this is the boolean form. TARGET is a place for us to store the
8358 results; this is NOT optional if IS_BOOL is true. */
8359
8360 static rtx
8361 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
8362 bool is_bool, rtx target)
8363 {
8364 rtx old_val, new_val, mem;
8365 rtx *pbool, *poval;
8366
8367 /* Expand the operands. */
8368 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8369 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8370 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8371
8372 pbool = poval = NULL;
8373 if (target != const0_rtx)
8374 {
8375 if (is_bool)
8376 pbool = &target;
8377 else
8378 poval = &target;
8379 }
8380 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
8381 false, MEMMODEL_SYNC_SEQ_CST,
8382 MEMMODEL_SYNC_SEQ_CST))
8383 return NULL_RTX;
8384
8385 return target;
8386 }
8387
8388 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8389 general form is actually an atomic exchange, and some targets only
8390 support a reduced form with the second argument being a constant 1.
8391 EXP is the CALL_EXPR; TARGET is an optional place for us to store
8392 the results. */
8393
8394 static rtx
8395 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
8396 rtx target)
8397 {
8398 rtx val, mem;
8399
8400 /* Expand the operands. */
8401 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8402 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8403
8404 return expand_sync_lock_test_and_set (target, mem, val);
8405 }
8406
8407 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8408
8409 static void
8410 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
8411 {
8412 rtx mem;
8413
8414 /* Expand the operands. */
8415 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8416
8417 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
8418 }
8419
8420 /* Given an integer representing an ``enum memmodel'', verify its
8421 correctness and return the memory model enum. */
8422
8423 static enum memmodel
8424 get_memmodel (tree exp)
8425 {
8426 rtx op;
8427 unsigned HOST_WIDE_INT val;
8428 location_t loc
8429 = expansion_point_location_if_in_system_header (input_location);
8430
8431 /* If the parameter is not a constant, it's a run time value so we'll just
8432 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
8433 if (TREE_CODE (exp) != INTEGER_CST)
8434 return MEMMODEL_SEQ_CST;
8435
8436 op = expand_normal (exp);
8437
8438 val = INTVAL (op);
8439 if (targetm.memmodel_check)
8440 val = targetm.memmodel_check (val);
8441 else if (val & ~MEMMODEL_MASK)
8442 {
8443 warning_at (loc, OPT_Winvalid_memory_model,
8444 "unknown architecture specifier in memory model to builtin");
8445 return MEMMODEL_SEQ_CST;
8446 }
8447
8448 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
8449 if (memmodel_base (val) >= MEMMODEL_LAST)
8450 {
8451 warning_at (loc, OPT_Winvalid_memory_model,
8452 "invalid memory model argument to builtin");
8453 return MEMMODEL_SEQ_CST;
8454 }
8455
8456 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
8457 be conservative and promote consume to acquire. */
8458 if (val == MEMMODEL_CONSUME)
8459 val = MEMMODEL_ACQUIRE;
8460
8461 return (enum memmodel) val;
8462 }
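
/* Illustrative sketch (not part of this file): how the checks above treat
   user-supplied memory model arguments.

     #include <stdatomic.h>

     int
     load_both (const _Atomic int *p, memory_order runtime_model)
     {
       // A constant consume is promoted to acquire (PR 59448 workaround).
       int a = atomic_load_explicit (p, memory_order_consume);
       // A non-constant model is simply treated as seq_cst here instead of
       // being checked at run time.
       int b = atomic_load_explicit (p, runtime_model);
       return a + b;
     }
   */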
8463
8464 /* Expand the __atomic_exchange intrinsic:
8465 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8466 EXP is the CALL_EXPR.
8467 TARGET is an optional place for us to store the results. */
8468
8469 static rtx
8470 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
8471 {
8472 rtx val, mem;
8473 enum memmodel model;
8474
8475 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8476
8477 if (!flag_inline_atomics)
8478 return NULL_RTX;
8479
8480 /* Expand the operands. */
8481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8482 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8483
8484 return expand_atomic_exchange (target, mem, val, model);
8485 }
8486
8487 /* Expand the __atomic_compare_exchange intrinsic:
8488 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
8489 TYPE desired, BOOL weak,
8490 enum memmodel success,
8491 enum memmodel failure)
8492 EXP is the CALL_EXPR.
8493 TARGET is an optional place for us to store the results. */
8494
8495 static rtx
8496 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
8497 rtx target)
8498 {
8499 rtx expect, desired, mem, oldval;
8500 rtx_code_label *label;
8501 enum memmodel success, failure;
8502 tree weak;
8503 bool is_weak;
8504 location_t loc
8505 = expansion_point_location_if_in_system_header (input_location);
8506
8507 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
8508 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
8509
8510 if (failure > success)
8511 {
8512 warning_at (loc, OPT_Winvalid_memory_model,
8513 "failure memory model cannot be stronger than success "
8514 "memory model for %<__atomic_compare_exchange%>");
8515 success = MEMMODEL_SEQ_CST;
8516 }
8517
8518 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8519 {
8520 warning_at (loc, OPT_Winvalid_memory_model,
8521 "invalid failure memory model for "
8522 "%<__atomic_compare_exchange%>");
8523 failure = MEMMODEL_SEQ_CST;
8524 success = MEMMODEL_SEQ_CST;
8525 }
8526
8527
8528 if (!flag_inline_atomics)
8529 return NULL_RTX;
8530
8531 /* Expand the operands. */
8532 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8533
8534 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
8535 expect = convert_memory_address (Pmode, expect);
8536 expect = gen_rtx_MEM (mode, expect);
8537 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8538
8539 weak = CALL_EXPR_ARG (exp, 3);
8540 is_weak = false;
8541 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
8542 is_weak = true;
8543
8544 if (target == const0_rtx)
8545 target = NULL;
8546
8547 /* Lest the rtl backend create a race condition with an improper store
8548 to memory, always create a new pseudo for OLDVAL. */
8549 oldval = NULL;
8550
8551 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
8552 is_weak, success, failure))
8553 return NULL_RTX;
8554
8555 /* Conditionally store back to EXPECT, lest we create a race condition
8556 with an improper store to memory. */
8557 /* ??? With a rearrangement of atomics at the gimple level, we can handle
8558 the normal case where EXPECT is totally private, i.e. a register. At
8559 which point the store can be unconditional. */
8560 label = gen_label_rtx ();
8561 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
8562 GET_MODE (target), 1, label);
8563 emit_move_insn (expect, oldval);
8564 emit_label (label);
8565
8566 return target;
8567 }
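
/* For example (an illustrative sketch):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&x, &expected, 1, 0,
					    __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);

   On failure the observed value of x must become visible in EXPECTED, which
   is why the store back to EXPECT above is guarded by a branch on the
   boolean result.  Passing, say, __ATOMIC_RELEASE as the failure model is
   diagnosed with -Winvalid-memory-model and both models are replaced by
   __ATOMIC_SEQ_CST.  */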
8568
8569 /* Helper function for expand_ifn_atomic_compare_exchange - expand
8570 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
8571 call. The weak parameter must be dropped to match the expected parameter
8572 list and the expected argument changed from value to pointer to memory
8573 slot. */
8574
8575 static void
8576 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
8577 {
8578 unsigned int z;
8579 vec<tree, va_gc> *vec;
8580
8581 vec_alloc (vec, 5);
8582 vec->quick_push (gimple_call_arg (call, 0));
8583 tree expected = gimple_call_arg (call, 1);
8584 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
8585 TREE_TYPE (expected));
8586 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
8587 if (expd != x)
8588 emit_move_insn (x, expd);
8589 tree v = make_tree (TREE_TYPE (expected), x);
8590 vec->quick_push (build1 (ADDR_EXPR,
8591 build_pointer_type (TREE_TYPE (expected)), v));
8592 vec->quick_push (gimple_call_arg (call, 2));
8593 /* Skip the boolean weak parameter. */
8594 for (z = 4; z < 6; z++)
8595 vec->quick_push (gimple_call_arg (call, z));
8596 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
8597 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
8598 gcc_assert (bytes_log2 < 5);
8599 built_in_function fncode
8600 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
8601 + bytes_log2);
8602 tree fndecl = builtin_decl_explicit (fncode);
8603 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
8604 fndecl);
8605 tree exp = build_call_vec (boolean_type_node, fn, vec);
8606 tree lhs = gimple_call_lhs (call);
8607 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
8608 if (lhs)
8609 {
8610 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8611 if (GET_MODE (boolret) != mode)
8612 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8613 x = force_reg (mode, x);
8614 write_complex_part (target, boolret, true);
8615 write_complex_part (target, x, false);
8616 }
8617 }
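
/* For example, with a 4-byte operand the internal call

     .ATOMIC_COMPARE_EXCHANGE (obj, expected, desired, flags, success, failure)

   is rewritten roughly as

     T tmp = expected;
     bool ok = __atomic_compare_exchange_4 (obj, &tmp, desired, success, failure);

   i.e. the weak flag is dropped and EXPECTED is spilled to a stack slot so
   its address can be passed; the resulting pair (ok, tmp) is then written
   into the two parts of the complex-typed LHS.  (Sketch only; T stands for
   the 4-byte operand type.)  */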
8618
8619 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
8620
8621 void
8622 expand_ifn_atomic_compare_exchange (gcall *call)
8623 {
8624 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
8625 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
8626 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
8627 rtx expect, desired, mem, oldval, boolret;
8628 enum memmodel success, failure;
8629 tree lhs;
8630 bool is_weak;
8631 location_t loc
8632 = expansion_point_location_if_in_system_header (gimple_location (call));
8633
8634 success = get_memmodel (gimple_call_arg (call, 4));
8635 failure = get_memmodel (gimple_call_arg (call, 5));
8636
8637 if (failure > success)
8638 {
8639 warning_at (loc, OPT_Winvalid_memory_model,
8640 "failure memory model cannot be stronger than success "
8641 "memory model for %<__atomic_compare_exchange%>");
8642 success = MEMMODEL_SEQ_CST;
8643 }
8644
8645 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8646 {
8647 warning_at (loc, OPT_Winvalid_memory_model,
8648 "invalid failure memory model for "
8649 "%<__atomic_compare_exchange%>");
8650 failure = MEMMODEL_SEQ_CST;
8651 success = MEMMODEL_SEQ_CST;
8652 }
8653
8654 if (!flag_inline_atomics)
8655 {
8656 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8657 return;
8658 }
8659
8660 /* Expand the operands. */
8661 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
8662
8663 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
8664 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
8665
8666 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
8667
8668 boolret = NULL;
8669 oldval = NULL;
8670
8671 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
8672 is_weak, success, failure))
8673 {
8674 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8675 return;
8676 }
8677
8678 lhs = gimple_call_lhs (call);
8679 if (lhs)
8680 {
8681 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8682 if (GET_MODE (boolret) != mode)
8683 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8684 write_complex_part (target, boolret, true);
8685 write_complex_part (target, oldval, false);
8686 }
8687 }
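
/* Argument 3 of the internal call packs both the operand size and the
   weakness flag: SIZE = arg3 & 255 (one of 1, 2, 4, 8 or 16) and
   IS_WEAK = (arg3 & 256) != 0.  For example, a value of 0x104 describes a
   weak compare-and-exchange on a 4-byte object.  */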
8688
8689 /* Expand the __atomic_load intrinsic:
8690 TYPE __atomic_load (TYPE *object, enum memmodel)
8691 EXP is the CALL_EXPR.
8692 TARGET is an optional place for us to store the results. */
8693
8694 static rtx
8695 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
8696 {
8697 rtx mem;
8698 enum memmodel model;
8699
8700 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8701 if (is_mm_release (model) || is_mm_acq_rel (model))
8702 {
8703 location_t loc
8704 = expansion_point_location_if_in_system_header (input_location);
8705 warning_at (loc, OPT_Winvalid_memory_model,
8706 "invalid memory model for %<__atomic_load%>");
8707 model = MEMMODEL_SEQ_CST;
8708 }
8709
8710 if (!flag_inline_atomics)
8711 return NULL_RTX;
8712
8713 /* Expand the operand. */
8714 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8715
8716 return expand_atomic_load (target, mem, model);
8717 }
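
/* For example (illustrative only):

     int v = __atomic_load_n (&x, __ATOMIC_RELEASE);

   is diagnosed above with -Winvalid-memory-model, since a load may not use
   release or acquire-release ordering, and is expanded as if
   __ATOMIC_SEQ_CST had been requested.  */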
8718
8719
8720 /* Expand the __atomic_store intrinsic:
8721 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
8722 EXP is the CALL_EXPR.
8723 The result of the store is not used, so there is no TARGET. */
8724
8725 static rtx
8726 expand_builtin_atomic_store (machine_mode mode, tree exp)
8727 {
8728 rtx mem, val;
8729 enum memmodel model;
8730
8731 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8732 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
8733 || is_mm_release (model)))
8734 {
8735 location_t loc
8736 = expansion_point_location_if_in_system_header (input_location);
8737 warning_at (loc, OPT_Winvalid_memory_model,
8738 "invalid memory model for %<__atomic_store%>");
8739 model = MEMMODEL_SEQ_CST;
8740 }
8741
8742 if (!flag_inline_atomics)
8743 return NULL_RTX;
8744
8745 /* Expand the operands. */
8746 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8747 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8748
8749 return expand_atomic_store (mem, val, model, false);
8750 }
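
/* Only relaxed, release and seq_cst orderings are valid for a store, so for
   example (illustrative only)

     __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);

   draws -Winvalid-memory-model and is expanded with __ATOMIC_SEQ_CST,
   whereas __atomic_store_n (&x, 1, __ATOMIC_RELEASE) is accepted as is.  */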
8751
8752 /* Expand the __atomic_fetch_XXX intrinsic:
8753 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8754 EXP is the CALL_EXPR.
8755 TARGET is an optional place for us to store the results.
8756 CODE is the operation: PLUS, MINUS, IOR, XOR, AND, or NOT.
8757 FETCH_AFTER is true if returning the result of the operation.
8758 FETCH_AFTER is false if returning the value before the operation.
8759 IGNORE is true if the result is not used.
8760 EXT_CALL is the correct builtin for an external call if this cannot be
8761 resolved to an instruction sequence. */
8762
8763 static rtx
8764 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
8765 enum rtx_code code, bool fetch_after,
8766 bool ignore, enum built_in_function ext_call)
8767 {
8768 rtx val, mem, ret;
8769 enum memmodel model;
8770 tree fndecl;
8771 tree addr;
8772
8773 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8774
8775 /* Expand the operands. */
8776 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8777 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8778
8779 /* Only try generating instructions if inlining is turned on. */
8780 if (flag_inline_atomics)
8781 {
8782 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
8783 if (ret)
8784 return ret;
8785 }
8786
8787 /* Return if a different routine isn't needed for the library call. */
8788 if (ext_call == BUILT_IN_NONE)
8789 return NULL_RTX;
8790
8791 /* Change the call to the specified function. */
8792 fndecl = get_callee_fndecl (exp);
8793 addr = CALL_EXPR_FN (exp);
8794 STRIP_NOPS (addr);
8795
8796 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
8797 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
8798
8799 /* If we will emit code after the call, the call cannot be a tail call.
8800 If it is emitted as a tail call, a barrier is emitted after it, and
8801 then all trailing code is removed. */
8802 if (!ignore)
8803 CALL_EXPR_TAILCALL (exp) = 0;
8804
8805 /* Expand the call here so we can emit trailing code. */
8806 ret = expand_call (exp, target, ignore);
8807
8808 /* Replace the original function just in case it matters. */
8809 TREE_OPERAND (addr, 0) = fndecl;
8810
8811 /* Then issue the arithmetic correction to return the right result. */
8812 if (!ignore)
8813 {
8814 if (code == NOT)
8815 {
8816 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
8817 OPTAB_LIB_WIDEN);
8818 ret = expand_simple_unop (mode, NOT, ret, target, true);
8819 }
8820 else
8821 ret = expand_simple_binop (mode, code, ret, val, target, true,
8822 OPTAB_LIB_WIDEN);
8823 }
8824 return ret;
8825 }
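
/* A sketch of the arithmetic correction above: when the operation falls back
   to the alternate entry point EXT_CALL, which returns the value before the
   operation, the post-operation result is reconstructed from it, e.g. (for
   int-sized operands)

     __atomic_add_fetch (p, v, m)  ==  __atomic_fetch_add (p, v, m) + v
     __atomic_nand_fetch (p, v, m) ==  ~(__atomic_fetch_nand (p, v, m) & v)

   the second line being the CODE == NOT special case handled above.  */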
8826
8827 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
8828
8829 void
8830 expand_ifn_atomic_bit_test_and (gcall *call)
8831 {
8832 tree ptr = gimple_call_arg (call, 0);
8833 tree bit = gimple_call_arg (call, 1);
8834 tree flag = gimple_call_arg (call, 2);
8835 tree lhs = gimple_call_lhs (call);
8836 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
8837 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
8838 enum rtx_code code;
8839 optab optab;
8840 class expand_operand ops[5];
8841
8842 gcc_assert (flag_inline_atomics);
8843
8844 if (gimple_call_num_args (call) == 4)
8845 model = get_memmodel (gimple_call_arg (call, 3));
8846
8847 rtx mem = get_builtin_sync_mem (ptr, mode);
8848 rtx val = expand_expr_force_mode (bit, mode);
8849
8850 switch (gimple_call_internal_fn (call))
8851 {
8852 case IFN_ATOMIC_BIT_TEST_AND_SET:
8853 code = IOR;
8854 optab = atomic_bit_test_and_set_optab;
8855 break;
8856 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
8857 code = XOR;
8858 optab = atomic_bit_test_and_complement_optab;
8859 break;
8860 case IFN_ATOMIC_BIT_TEST_AND_RESET:
8861 code = AND;
8862 optab = atomic_bit_test_and_reset_optab;
8863 break;
8864 default:
8865 gcc_unreachable ();
8866 }
8867
8868 if (lhs == NULL_TREE)
8869 {
8870 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8871 val, NULL_RTX, true, OPTAB_DIRECT);
8872 if (code == AND)
8873 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8874 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
8875 return;
8876 }
8877
8878 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8879 enum insn_code icode = direct_optab_handler (optab, mode);
8880 gcc_assert (icode != CODE_FOR_nothing);
8881 create_output_operand (&ops[0], target, mode);
8882 create_fixed_operand (&ops[1], mem);
8883 create_convert_operand_to (&ops[2], val, mode, true);
8884 create_integer_operand (&ops[3], model);
8885 create_integer_operand (&ops[4], integer_onep (flag));
8886 if (maybe_expand_insn (icode, 5, ops))
8887 return;
8888
8889 rtx bitval = val;
8890 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8891 val, NULL_RTX, true, OPTAB_DIRECT);
8892 rtx maskval = val;
8893 if (code == AND)
8894 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8895 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
8896 code, model, false);
8897 if (integer_onep (flag))
8898 {
8899 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
8900 NULL_RTX, true, OPTAB_DIRECT);
8901 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
8902 true, OPTAB_DIRECT);
8903 }
8904 else
8905 result = expand_simple_binop (mode, AND, result, maskval, target, true,
8906 OPTAB_DIRECT);
8907 if (result != target)
8908 emit_move_insn (target, result);
8909 }
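
/* For example, this internal function is what earlier passes produce for a
   source pattern such as (illustrative sketch, assuming unsigned int)

     int was_set = (__atomic_fetch_or (&word, 1u << bit, m) >> bit) & 1;

   If the target provides the atomic_bit_test_and_set (etc.) pattern, the
   whole idiom becomes a single instruction; otherwise the fallback above
   performs an atomic fetch-op on the mask 1 << bit and then extracts the
   tested bit.  */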
8910
8911 /* Expand an atomic clear operation.
8912 void __atomic_clear (BOOL *obj, enum memmodel)
8913 EXP is the call expression. */
8914
8915 static rtx
8916 expand_builtin_atomic_clear (tree exp)
8917 {
8918 machine_mode mode;
8919 rtx mem, ret;
8920 enum memmodel model;
8921
8922 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8923 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8924 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8925
8926 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
8927 {
8928 location_t loc
8929 = expansion_point_location_if_in_system_header (input_location);
8930 warning_at (loc, OPT_Winvalid_memory_model,
8931 "invalid memory model for %<__atomic_store%>");
8932 model = MEMMODEL_SEQ_CST;
8933 }
8934
8935 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
8936 Failing both, emit a plain store. The only way the atomic forms can
8937 fail is if the bool type is larger than a word size. Unlikely, but
8938 handle it anyway for completeness. Assume a single-threaded model since
8939 there is no atomic support in this case, and no barriers are required. */
8940 ret = expand_atomic_store (mem, const0_rtx, model, true);
8941 if (!ret)
8942 emit_move_insn (mem, const0_rtx);
8943 return const0_rtx;
8944 }
8945
8946 /* Expand an atomic test_and_set operation.
8947 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
8948 EXP is the call expression. */
8949
8950 static rtx
8951 expand_builtin_atomic_test_and_set (tree exp, rtx target)
8952 {
8953 rtx mem;
8954 enum memmodel model;
8955 machine_mode mode;
8956
8957 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8958 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8959 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8960
8961 return expand_atomic_test_and_set (target, mem, model);
8962 }
8963
8964
8965 /* Return true if an object of size ARG0 at (optional) address ARG1 is always
8966 lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
8967
8968 static tree
8969 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
8970 {
8971 int size;
8972 machine_mode mode;
8973 unsigned int mode_align, type_align;
8974
8975 if (TREE_CODE (arg0) != INTEGER_CST)
8976 return NULL_TREE;
8977
8978 /* We need a corresponding integer mode for the access to be lock-free. */
8979 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
8980 if (!int_mode_for_size (size, 0).exists (&mode))
8981 return boolean_false_node;
8982
8983 mode_align = GET_MODE_ALIGNMENT (mode);
8984
8985 if (TREE_CODE (arg1) == INTEGER_CST)
8986 {
8987 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
8988
8989 /* Either this argument is null, or it's a fake pointer encoding
8990 the alignment of the object. */
8991 val = least_bit_hwi (val);
8992 val *= BITS_PER_UNIT;
8993
8994 if (val == 0 || mode_align < val)
8995 type_align = mode_align;
8996 else
8997 type_align = val;
8998 }
8999 else
9000 {
9001 tree ttype = TREE_TYPE (arg1);
9002
9003 /* This function is usually invoked and folded immediately by the front
9004 end before anything else has a chance to look at it. The pointer
9005 parameter at this point is usually cast to a void *, so check for that
9006 and look past the cast. */
9007 if (CONVERT_EXPR_P (arg1)
9008 && POINTER_TYPE_P (ttype)
9009 && VOID_TYPE_P (TREE_TYPE (ttype))
9010 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
9011 arg1 = TREE_OPERAND (arg1, 0);
9012
9013 ttype = TREE_TYPE (arg1);
9014 gcc_assert (POINTER_TYPE_P (ttype));
9015
9016 /* Get the underlying type of the object. */
9017 ttype = TREE_TYPE (ttype);
9018 type_align = TYPE_ALIGN (ttype);
9019 }
9020
9021 /* If the object has smaller alignment, the lock free routines cannot
9022 be used. */
9023 if (type_align < mode_align)
9024 return boolean_false_node;
9025
9026 /* Check if a compare_and_swap pattern exists for the mode which represents
9027 the required size. The pattern is not allowed to fail, so the existence
9028 of the pattern indicates support is present. Also require that an
9029 atomic load exists for the required size. */
9030 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
9031 return boolean_true_node;
9032 else
9033 return boolean_false_node;
9034 }
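
/* For example (illustrative only):

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true on a target with a 4-byte compare-and-swap and atomic load,
   since a null second argument means "typical alignment for the size".
   A non-null integer constant is treated as a fake pointer whose least
   significant set bit encodes the object's alignment, so an argument of 2
   claims only 2-byte alignment and the fold yields false when the
   corresponding mode requires 4-byte alignment.  */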
9035
9036 /* Return true if the parameters to call EXP represent an object which will
9037 always generate lock free instructions. The first argument represents the
9038 size of the object, and the second parameter is a pointer to the object
9039 itself. If NULL is passed for the object, then the result is based on
9040 typical alignment for an object of the specified size. Otherwise return
9041 false. */
9042
9043 static rtx
9044 expand_builtin_atomic_always_lock_free (tree exp)
9045 {
9046 tree size;
9047 tree arg0 = CALL_EXPR_ARG (exp, 0);
9048 tree arg1 = CALL_EXPR_ARG (exp, 1);
9049
9050 if (TREE_CODE (arg0) != INTEGER_CST)
9051 {
9052 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
9053 return const0_rtx;
9054 }
9055
9056 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
9057 if (size == boolean_true_node)
9058 return const1_rtx;
9059 return const0_rtx;
9060 }
9061
9062 /* Return true if it can be determined that the object ARG1 of size ARG0
9063 is lock free on this architecture; otherwise return NULL_TREE. */
9064
9065 static tree
9066 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9067 {
9068 if (!flag_inline_atomics)
9069 return NULL_TREE;
9070
9071 /* If it isn't always lock free, don't generate a result. */
9072 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9073 return boolean_true_node;
9074
9075 return NULL_TREE;
9076 }
9077
9078 /* Return true if the parameters to call EXP represent an object which will
9079 always generate lock free instructions. The first argument represents the
9080 size of the object, and the second parameter is a pointer to the object
9081 itself. If NULL is passed for the object, then the result is based on
9082 typical alignment for an object of the specified size. Otherwise return
9083 NULL. */
9084
9085 static rtx
9086 expand_builtin_atomic_is_lock_free (tree exp)
9087 {
9088 tree size;
9089 tree arg0 = CALL_EXPR_ARG (exp, 0);
9090 tree arg1 = CALL_EXPR_ARG (exp, 1);
9091
9092 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9093 {
9094 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
9095 return NULL_RTX;
9096 }
9097
9098 if (!flag_inline_atomics)
9099 return NULL_RTX;
9100
9101 /* If the value is known at compile time, return the RTX for it. */
9102 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
9103 if (size == boolean_true_node)
9104 return const1_rtx;
9105
9106 return NULL_RTX;
9107 }
9108
9109 /* Expand the __atomic_thread_fence intrinsic:
9110 void __atomic_thread_fence (enum memmodel)
9111 EXP is the CALL_EXPR. */
9112
9113 static void
9114 expand_builtin_atomic_thread_fence (tree exp)
9115 {
9116 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9117 expand_mem_thread_fence (model);
9118 }
9119
9120 /* Expand the __atomic_signal_fence intrinsic:
9121 void __atomic_signal_fence (enum memmodel)
9122 EXP is the CALL_EXPR. */
9123
9124 static void
9125 expand_builtin_atomic_signal_fence (tree exp)
9126 {
9127 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9128 expand_mem_signal_fence (model);
9129 }
9130
9131 /* Expand the __sync_synchronize intrinsic. */
9132
9133 static void
9134 expand_builtin_sync_synchronize (void)
9135 {
9136 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
9137 }
9138
9139 static rtx
9140 expand_builtin_thread_pointer (tree exp, rtx target)
9141 {
9142 enum insn_code icode;
9143 if (!validate_arglist (exp, VOID_TYPE))
9144 return const0_rtx;
9145 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9146 if (icode != CODE_FOR_nothing)
9147 {
9148 class expand_operand op;
9149 /* If the target is not suitable then create a new target. */
9150 if (target == NULL_RTX
9151 || !REG_P (target)
9152 || GET_MODE (target) != Pmode)
9153 target = gen_reg_rtx (Pmode);
9154 create_output_operand (&op, target, Pmode);
9155 expand_insn (icode, 1, &op);
9156 return target;
9157 }
9158 error ("%<__builtin_thread_pointer%> is not supported on this target");
9159 return const0_rtx;
9160 }
9161
9162 static void
9163 expand_builtin_set_thread_pointer (tree exp)
9164 {
9165 enum insn_code icode;
9166 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9167 return;
9168 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9169 if (icode != CODE_FOR_nothing)
9170 {
9171 class expand_operand op;
9172 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9173 Pmode, EXPAND_NORMAL);
9174 create_input_operand (&op, val, Pmode);
9175 expand_insn (icode, 1, &op);
9176 return;
9177 }
9178 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
9179 }
9180
9181 \f
9182 /* Emit code to restore the current value of stack. */
9183
9184 static void
9185 expand_stack_restore (tree var)
9186 {
9187 rtx_insn *prev;
9188 rtx sa = expand_normal (var);
9189
9190 sa = convert_memory_address (Pmode, sa);
9191
9192 prev = get_last_insn ();
9193 emit_stack_restore (SAVE_BLOCK, sa);
9194
9195 record_new_stack_level ();
9196
9197 fixup_args_size_notes (prev, get_last_insn (), 0);
9198 }
9199
9200 /* Emit code to save the current value of stack. */
9201
9202 static rtx
9203 expand_stack_save (void)
9204 {
9205 rtx ret = NULL_RTX;
9206
9207 emit_stack_save (SAVE_BLOCK, &ret);
9208 return ret;
9209 }
9210
9211 /* Emit code to get the openacc gang, worker or vector id or size. */
9212
9213 static rtx
9214 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9215 {
9216 const char *name;
9217 rtx fallback_retval;
9218 rtx_insn *(*gen_fn) (rtx, rtx);
9219 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9220 {
9221 case BUILT_IN_GOACC_PARLEVEL_ID:
9222 name = "__builtin_goacc_parlevel_id";
9223 fallback_retval = const0_rtx;
9224 gen_fn = targetm.gen_oacc_dim_pos;
9225 break;
9226 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9227 name = "__builtin_goacc_parlevel_size";
9228 fallback_retval = const1_rtx;
9229 gen_fn = targetm.gen_oacc_dim_size;
9230 break;
9231 default:
9232 gcc_unreachable ();
9233 }
9234
9235 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9236 {
9237 error ("%qs only supported in OpenACC code", name);
9238 return const0_rtx;
9239 }
9240
9241 tree arg = CALL_EXPR_ARG (exp, 0);
9242 if (TREE_CODE (arg) != INTEGER_CST)
9243 {
9244 error ("non-constant argument 0 to %qs", name);
9245 return const0_rtx;
9246 }
9247
9248 int dim = TREE_INT_CST_LOW (arg);
9249 switch (dim)
9250 {
9251 case GOMP_DIM_GANG:
9252 case GOMP_DIM_WORKER:
9253 case GOMP_DIM_VECTOR:
9254 break;
9255 default:
9256 error ("illegal argument 0 to %qs", name);
9257 return const0_rtx;
9258 }
9259
9260 if (ignore)
9261 return target;
9262
9263 if (target == NULL_RTX)
9264 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9265
9266 if (!targetm.have_oacc_dim_size ())
9267 {
9268 emit_move_insn (target, fallback_retval);
9269 return target;
9270 }
9271
9272 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9273 emit_insn (gen_fn (reg, GEN_INT (dim)));
9274 if (reg != target)
9275 emit_move_insn (target, reg);
9276
9277 return target;
9278 }
9279
9280 /* Expand a string compare operation using a sequence of char comparisons
9281 to get rid of the calling overhead, with result going to TARGET if
9282 that's convenient.
9283
9284 VAR_STR is the variable string source;
9285 CONST_STR is the constant string source;
9286 LENGTH is the number of chars to compare;
9287 CONST_STR_N indicates which source string is the constant string;
9288 IS_MEMCMP indicates whether it's a memcmp or strcmp.
9289
9290 The call is expanded to (assume const_str_n is 2, i.e., arg2 is a constant string):
9291
9292 target = (int) (unsigned char) var_str[0]
9293 - (int) (unsigned char) const_str[0];
9294 if (target != 0)
9295 goto ne_label;
9296 ...
9297 target = (int) (unsigned char) var_str[length - 2]
9298 - (int) (unsigned char) const_str[length - 2];
9299 if (target != 0)
9300 goto ne_label;
9301 target = (int) (unsigned char) var_str[length - 1]
9302 - (int) (unsigned char) const_str[length - 1];
9303 ne_label:
9304 */
9305
9306 static rtx
9307 inline_string_cmp (rtx target, tree var_str, const char *const_str,
9308 unsigned HOST_WIDE_INT length,
9309 int const_str_n, machine_mode mode)
9310 {
9311 HOST_WIDE_INT offset = 0;
9312 rtx var_rtx_array
9313 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
9314 rtx var_rtx = NULL_RTX;
9315 rtx const_rtx = NULL_RTX;
9316 rtx result = target ? target : gen_reg_rtx (mode);
9317 rtx_code_label *ne_label = gen_label_rtx ();
9318 tree unit_type_node = unsigned_char_type_node;
9319 scalar_int_mode unit_mode
9320 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
9321
9322 start_sequence ();
9323
9324 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
9325 {
9326 var_rtx
9327 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
9328 const_rtx = c_readstr (const_str + offset, unit_mode);
9329 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
9330 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
9331
9332 op0 = convert_modes (mode, unit_mode, op0, 1);
9333 op1 = convert_modes (mode, unit_mode, op1, 1);
9334 result = expand_simple_binop (mode, MINUS, op0, op1,
9335 result, 1, OPTAB_WIDEN);
9336 if (i < length - 1)
9337 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
9338 mode, true, ne_label);
9339 offset += GET_MODE_SIZE (unit_mode);
9340 }
9341
9342 emit_label (ne_label);
9343 rtx_insn *insns = get_insns ();
9344 end_sequence ();
9345 emit_insn (insns);
9346
9347 return result;
9348 }
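
/* For example, with LENGTH == 3, CONST_STR_N == 2 and the constant string
   "ab" (including its terminating nul), the sequence built above
   corresponds to

     result = (int) (unsigned char) s[0] - (int) 'a';
     if (result != 0) goto ne;
     result = (int) (unsigned char) s[1] - (int) 'b';
     if (result != 0) goto ne;
     result = (int) (unsigned char) s[2] - (int) '\0';
   ne:;

   one subtraction per byte, exiting early at the first difference.  */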
9349
9350 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
9351 to TARGET if that's convenient.
9352 If the call cannot be inlined, return NULL_RTX. */
9353
9354 static rtx
9355 inline_expand_builtin_bytecmp (tree exp, rtx target)
9356 {
9357 tree fndecl = get_callee_fndecl (exp);
9358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9359 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9360
9361 /* Do NOT apply this inlining expansion when optimizing for size or
9362 optimization level below 2. */
9363 if (optimize < 2 || optimize_insn_for_size_p ())
9364 return NULL_RTX;
9365
9366 gcc_checking_assert (fcode == BUILT_IN_STRCMP
9367 || fcode == BUILT_IN_STRNCMP
9368 || fcode == BUILT_IN_MEMCMP);
9369
9370 /* On a target where the type of the call (int) has the same or narrower precision
9371 than unsigned char, give up on the inline expansion. */
9372 if (TYPE_PRECISION (unsigned_char_type_node)
9373 >= TYPE_PRECISION (TREE_TYPE (exp)))
9374 return NULL_RTX;
9375
9376 tree arg1 = CALL_EXPR_ARG (exp, 0);
9377 tree arg2 = CALL_EXPR_ARG (exp, 1);
9378 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9379
9380 unsigned HOST_WIDE_INT len1 = 0;
9381 unsigned HOST_WIDE_INT len2 = 0;
9382 unsigned HOST_WIDE_INT len3 = 0;
9383
9384 /* Get the object representation of the initializers of ARG1 and ARG2
9385 as strings, provided they refer to constant objects, with their byte
9386 sizes in LEN1 and LEN2, respectively. */
9387 const char *bytes1 = getbyterep (arg1, &len1);
9388 const char *bytes2 = getbyterep (arg2, &len2);
9389
9390 /* Fail if neither argument refers to an initialized constant. */
9391 if (!bytes1 && !bytes2)
9392 return NULL_RTX;
9393
9394 if (is_ncmp)
9395 {
9396 /* Fail if the memcmp/strncmp bound is not a constant. */
9397 if (!tree_fits_uhwi_p (len3_tree))
9398 return NULL_RTX;
9399
9400 len3 = tree_to_uhwi (len3_tree);
9401
9402 if (fcode == BUILT_IN_MEMCMP)
9403 {
9404 /* Fail if the memcmp bound is greater than the size of either
9405 of the two constant objects. */
9406 if ((bytes1 && len1 < len3)
9407 || (bytes2 && len2 < len3))
9408 return NULL_RTX;
9409 }
9410 }
9411
9412 if (fcode != BUILT_IN_MEMCMP)
9413 {
9414 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9415 and LEN2 to the length of the nul-terminated string stored
9416 in each. */
9417 if (bytes1 != NULL)
9418 len1 = strnlen (bytes1, len1) + 1;
9419 if (bytes2 != NULL)
9420 len2 = strnlen (bytes2, len2) + 1;
9421 }
9422
9423 /* See inline_string_cmp. */
9424 int const_str_n;
9425 if (!len1)
9426 const_str_n = 2;
9427 else if (!len2)
9428 const_str_n = 1;
9429 else if (len2 > len1)
9430 const_str_n = 1;
9431 else
9432 const_str_n = 2;
9433
9434 /* For strncmp only, compute the new bound as the smallest of
9435 the lengths of the two strings (plus 1) and the bound provided
9436 to the function. */
9437 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9438 if (is_ncmp && len3 < bound)
9439 bound = len3;
9440
9441 /* If the bound of the comparison is larger than the threshold,
9442 do nothing. */
9443 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
9444 return NULL_RTX;
9445
9446 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9447
9448 /* Now expand the call inline. */
9449 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
9450 (const_str_n == 1) ? bytes1 : bytes2, bound,
9451 const_str_n, mode);
9452 }
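
/* A worked example of the bound computation: for

     strncmp (s, "hello", 32)

   LEN2 becomes strlen ("hello") + 1 == 6, so the effective bound is
   MIN (6, 32) == 6 and the call is expanded inline whenever 6 does not
   exceed --param builtin-string-cmp-inline-length (and the other
   preconditions above hold).  */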
9453
9454 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9455 represents the size of the first argument to that call, or VOIDmode
9456 if the argument is a pointer. IGNORE will be true if the result
9457 isn't used. */
9458 static rtx
9459 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9460 bool ignore)
9461 {
9462 rtx val, failsafe;
9463 unsigned nargs = call_expr_nargs (exp);
9464
9465 tree arg0 = CALL_EXPR_ARG (exp, 0);
9466
9467 if (mode == VOIDmode)
9468 {
9469 mode = TYPE_MODE (TREE_TYPE (arg0));
9470 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9471 }
9472
9473 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9474
9475 /* An optional second argument can be used as a failsafe value on
9476 some machines. If it isn't present, then the failsafe value is
9477 assumed to be 0. */
9478 if (nargs > 1)
9479 {
9480 tree arg1 = CALL_EXPR_ARG (exp, 1);
9481 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9482 }
9483 else
9484 failsafe = const0_rtx;
9485
9486 /* If the result isn't used, the behavior is undefined. It would be
9487 nice to emit a warning here, but path splitting means this might
9488 happen with legitimate code. So simply drop the builtin
9489 expansion in that case; we've handled any side-effects above. */
9490 if (ignore)
9491 return const0_rtx;
9492
9493 /* If we don't have a suitable target, create one to hold the result. */
9494 if (target == NULL || GET_MODE (target) != mode)
9495 target = gen_reg_rtx (mode);
9496
9497 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9498 val = convert_modes (mode, VOIDmode, val, false);
9499
9500 return targetm.speculation_safe_value (mode, target, val, failsafe);
9501 }
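
/* For example (illustrative only):

     size_t i = __builtin_speculation_safe_value (idx, 0);

   evaluates to IDX on the architecturally executed path; on targets that
   implement the hook by conditionally overwriting the value during
   misspeculation, the optional second argument supplies the replacement
   (failsafe) value, defaulting to 0 as above.  */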
9502
9503 /* Expand an expression EXP that calls a built-in function,
9504 with result going to TARGET if that's convenient
9505 (and in mode MODE if that's convenient).
9506 SUBTARGET may be used as the target for computing one of EXP's operands.
9507 IGNORE is nonzero if the value is to be ignored. */
9508
9509 rtx
9510 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
9511 int ignore)
9512 {
9513 tree fndecl = get_callee_fndecl (exp);
9514 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9515 int flags;
9516
9517 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9518 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9519
9520 /* When ASan is enabled, we don't want to expand some memory/string
9521 builtins and rely on libsanitizer's hooks. This allows us to avoid
9522 redundant checks and be sure that possible overflows will be detected
9523 by ASan. */
9524
9525 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9526 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9527 return expand_call (exp, target, ignore);
9528
9529 /* When not optimizing, generate calls to library functions for a certain
9530 set of builtins. */
9531 if (!optimize
9532 && !called_as_built_in (fndecl)
9533 && fcode != BUILT_IN_FORK
9534 && fcode != BUILT_IN_EXECL
9535 && fcode != BUILT_IN_EXECV
9536 && fcode != BUILT_IN_EXECLP
9537 && fcode != BUILT_IN_EXECLE
9538 && fcode != BUILT_IN_EXECVP
9539 && fcode != BUILT_IN_EXECVE
9540 && fcode != BUILT_IN_CLEAR_CACHE
9541 && !ALLOCA_FUNCTION_CODE_P (fcode)
9542 && fcode != BUILT_IN_FREE)
9543 return expand_call (exp, target, ignore);
9544
9545 /* The built-in function expanders test for target == const0_rtx
9546 to determine whether the function's result will be ignored. */
9547 if (ignore)
9548 target = const0_rtx;
9549
9550 /* If the result of a pure or const built-in function is ignored, and
9551 none of its arguments are volatile, we can avoid expanding the
9552 built-in call and just evaluate the arguments for side-effects. */
9553 if (target == const0_rtx
9554 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9555 && !(flags & ECF_LOOPING_CONST_OR_PURE))
9556 {
9557 bool volatilep = false;
9558 tree arg;
9559 call_expr_arg_iterator iter;
9560
9561 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9562 if (TREE_THIS_VOLATILE (arg))
9563 {
9564 volatilep = true;
9565 break;
9566 }
9567
9568 if (! volatilep)
9569 {
9570 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9571 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
9572 return const0_rtx;
9573 }
9574 }
9575
9576 switch (fcode)
9577 {
9578 CASE_FLT_FN (BUILT_IN_FABS):
9579 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9580 case BUILT_IN_FABSD32:
9581 case BUILT_IN_FABSD64:
9582 case BUILT_IN_FABSD128:
9583 target = expand_builtin_fabs (exp, target, subtarget);
9584 if (target)
9585 return target;
9586 break;
9587
9588 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9589 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
9590 target = expand_builtin_copysign (exp, target, subtarget);
9591 if (target)
9592 return target;
9593 break;
9594
9595 /* Just do a normal library call if we were unable to fold
9596 the values. */
9597 CASE_FLT_FN (BUILT_IN_CABS):
9598 break;
9599
9600 CASE_FLT_FN (BUILT_IN_FMA):
9601 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9602 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9603 if (target)
9604 return target;
9605 break;
9606
9607 CASE_FLT_FN (BUILT_IN_ILOGB):
9608 if (! flag_unsafe_math_optimizations)
9609 break;
9610 gcc_fallthrough ();
9611 CASE_FLT_FN (BUILT_IN_ISINF):
9612 CASE_FLT_FN (BUILT_IN_FINITE):
9613 case BUILT_IN_ISFINITE:
9614 case BUILT_IN_ISNORMAL:
9615 target = expand_builtin_interclass_mathfn (exp, target);
9616 if (target)
9617 return target;
9618 break;
9619
9620 CASE_FLT_FN (BUILT_IN_ICEIL):
9621 CASE_FLT_FN (BUILT_IN_LCEIL):
9622 CASE_FLT_FN (BUILT_IN_LLCEIL):
9623 CASE_FLT_FN (BUILT_IN_LFLOOR):
9624 CASE_FLT_FN (BUILT_IN_IFLOOR):
9625 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9626 target = expand_builtin_int_roundingfn (exp, target);
9627 if (target)
9628 return target;
9629 break;
9630
9631 CASE_FLT_FN (BUILT_IN_IRINT):
9632 CASE_FLT_FN (BUILT_IN_LRINT):
9633 CASE_FLT_FN (BUILT_IN_LLRINT):
9634 CASE_FLT_FN (BUILT_IN_IROUND):
9635 CASE_FLT_FN (BUILT_IN_LROUND):
9636 CASE_FLT_FN (BUILT_IN_LLROUND):
9637 target = expand_builtin_int_roundingfn_2 (exp, target);
9638 if (target)
9639 return target;
9640 break;
9641
9642 CASE_FLT_FN (BUILT_IN_POWI):
9643 target = expand_builtin_powi (exp, target);
9644 if (target)
9645 return target;
9646 break;
9647
9648 CASE_FLT_FN (BUILT_IN_CEXPI):
9649 target = expand_builtin_cexpi (exp, target);
9650 gcc_assert (target);
9651 return target;
9652
9653 CASE_FLT_FN (BUILT_IN_SIN):
9654 CASE_FLT_FN (BUILT_IN_COS):
9655 if (! flag_unsafe_math_optimizations)
9656 break;
9657 target = expand_builtin_mathfn_3 (exp, target, subtarget);
9658 if (target)
9659 return target;
9660 break;
9661
9662 CASE_FLT_FN (BUILT_IN_SINCOS):
9663 if (! flag_unsafe_math_optimizations)
9664 break;
9665 target = expand_builtin_sincos (exp);
9666 if (target)
9667 return target;
9668 break;
9669
9670 case BUILT_IN_APPLY_ARGS:
9671 return expand_builtin_apply_args ();
9672
9673 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9674 FUNCTION with a copy of the parameters described by
9675 ARGUMENTS, and ARGSIZE. It returns a block of memory
9676 allocated on the stack into which is stored all the registers
9677 that might possibly be used for returning the result of a
9678 function. ARGUMENTS is the value returned by
9679 __builtin_apply_args. ARGSIZE is the number of bytes of
9680 arguments that must be copied. ??? How should this value be
9681 computed? We'll also need a safe worst case value for varargs
9682 functions. */
9683 case BUILT_IN_APPLY:
9684 if (!validate_arglist (exp, POINTER_TYPE,
9685 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
9686 && !validate_arglist (exp, REFERENCE_TYPE,
9687 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9688 return const0_rtx;
9689 else
9690 {
9691 rtx ops[3];
9692
9693 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
9694 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
9695 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
9696
9697 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9698 }
9699
9700 /* __builtin_return (RESULT) causes the function to return the
9701 value described by RESULT. RESULT is address of the block of
9702 memory returned by __builtin_apply. */
9703 case BUILT_IN_RETURN:
9704 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9705 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
9706 return const0_rtx;
9707
9708 case BUILT_IN_SAVEREGS:
9709 return expand_builtin_saveregs ();
9710
9711 case BUILT_IN_VA_ARG_PACK:
9712 /* All valid uses of __builtin_va_arg_pack () are removed during
9713 inlining. */
9714 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9715 return const0_rtx;
9716
9717 case BUILT_IN_VA_ARG_PACK_LEN:
9718 /* All valid uses of __builtin_va_arg_pack_len () are removed during
9719 inlining. */
9720 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
9721 return const0_rtx;
9722
9723 /* Return the address of the first anonymous stack arg. */
9724 case BUILT_IN_NEXT_ARG:
9725 if (fold_builtin_next_arg (exp, false))
9726 return const0_rtx;
9727 return expand_builtin_next_arg ();
9728
9729 case BUILT_IN_CLEAR_CACHE:
9730 expand_builtin___clear_cache (exp);
9731 return const0_rtx;
9732
9733 case BUILT_IN_CLASSIFY_TYPE:
9734 return expand_builtin_classify_type (exp);
9735
9736 case BUILT_IN_CONSTANT_P:
9737 return const0_rtx;
9738
9739 case BUILT_IN_FRAME_ADDRESS:
9740 case BUILT_IN_RETURN_ADDRESS:
9741 return expand_builtin_frame_address (fndecl, exp);
9742
9743 /* Returns the address of the area where the structure is returned.
9744 0 otherwise. */
9745 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9746 if (call_expr_nargs (exp) != 0
9747 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9748 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9749 return const0_rtx;
9750 else
9751 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9752
9753 CASE_BUILT_IN_ALLOCA:
9754 target = expand_builtin_alloca (exp);
9755 if (target)
9756 return target;
9757 break;
9758
9759 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9760 return expand_asan_emit_allocas_unpoison (exp);
9761
9762 case BUILT_IN_STACK_SAVE:
9763 return expand_stack_save ();
9764
9765 case BUILT_IN_STACK_RESTORE:
9766 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
9767 return const0_rtx;
9768
9769 case BUILT_IN_BSWAP16:
9770 case BUILT_IN_BSWAP32:
9771 case BUILT_IN_BSWAP64:
9772 case BUILT_IN_BSWAP128:
9773 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
9774 if (target)
9775 return target;
9776 break;
9777
9778 CASE_INT_FN (BUILT_IN_FFS):
9779 target = expand_builtin_unop (target_mode, exp, target,
9780 subtarget, ffs_optab);
9781 if (target)
9782 return target;
9783 break;
9784
9785 CASE_INT_FN (BUILT_IN_CLZ):
9786 target = expand_builtin_unop (target_mode, exp, target,
9787 subtarget, clz_optab);
9788 if (target)
9789 return target;
9790 break;
9791
9792 CASE_INT_FN (BUILT_IN_CTZ):
9793 target = expand_builtin_unop (target_mode, exp, target,
9794 subtarget, ctz_optab);
9795 if (target)
9796 return target;
9797 break;
9798
9799 CASE_INT_FN (BUILT_IN_CLRSB):
9800 target = expand_builtin_unop (target_mode, exp, target,
9801 subtarget, clrsb_optab);
9802 if (target)
9803 return target;
9804 break;
9805
9806 CASE_INT_FN (BUILT_IN_POPCOUNT):
9807 target = expand_builtin_unop (target_mode, exp, target,
9808 subtarget, popcount_optab);
9809 if (target)
9810 return target;
9811 break;
9812
9813 CASE_INT_FN (BUILT_IN_PARITY):
9814 target = expand_builtin_unop (target_mode, exp, target,
9815 subtarget, parity_optab);
9816 if (target)
9817 return target;
9818 break;
9819
9820 case BUILT_IN_STRLEN:
9821 target = expand_builtin_strlen (exp, target, target_mode);
9822 if (target)
9823 return target;
9824 break;
9825
9826 case BUILT_IN_STRNLEN:
9827 target = expand_builtin_strnlen (exp, target, target_mode);
9828 if (target)
9829 return target;
9830 break;
9831
9832 case BUILT_IN_STRCAT:
9833 target = expand_builtin_strcat (exp);
9834 if (target)
9835 return target;
9836 break;
9837
9838 case BUILT_IN_GETTEXT:
9839 case BUILT_IN_PUTS:
9840 case BUILT_IN_PUTS_UNLOCKED:
9841 case BUILT_IN_STRDUP:
9842 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9843 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9844 break;
9845
9846 case BUILT_IN_INDEX:
9847 case BUILT_IN_RINDEX:
9848 case BUILT_IN_STRCHR:
9849 case BUILT_IN_STRRCHR:
9850 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9851 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9852 break;
9853
9854 case BUILT_IN_FPUTS:
9855 case BUILT_IN_FPUTS_UNLOCKED:
9856 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9857 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9858 break;
9859
9860 case BUILT_IN_STRNDUP:
9861 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9862 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
9863 break;
9864
9865 case BUILT_IN_STRCASECMP:
9866 case BUILT_IN_STRPBRK:
9867 case BUILT_IN_STRSPN:
9868 case BUILT_IN_STRCSPN:
9869 case BUILT_IN_STRSTR:
9870 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9871 {
9872 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9873 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
9874 }
9875 break;
9876
9877 case BUILT_IN_STRCPY:
9878 target = expand_builtin_strcpy (exp, target);
9879 if (target)
9880 return target;
9881 break;
9882
9883 case BUILT_IN_STRNCAT:
9884 target = expand_builtin_strncat (exp, target);
9885 if (target)
9886 return target;
9887 break;
9888
9889 case BUILT_IN_STRNCPY:
9890 target = expand_builtin_strncpy (exp, target);
9891 if (target)
9892 return target;
9893 break;
9894
9895 case BUILT_IN_STPCPY:
9896 target = expand_builtin_stpcpy (exp, target, mode);
9897 if (target)
9898 return target;
9899 break;
9900
9901 case BUILT_IN_STPNCPY:
9902 target = expand_builtin_stpncpy (exp, target);
9903 if (target)
9904 return target;
9905 break;
9906
9907 case BUILT_IN_MEMCHR:
9908 target = expand_builtin_memchr (exp, target);
9909 if (target)
9910 return target;
9911 break;
9912
9913 case BUILT_IN_MEMCPY:
9914 target = expand_builtin_memcpy (exp, target);
9915 if (target)
9916 return target;
9917 break;
9918
9919 case BUILT_IN_MEMMOVE:
9920 target = expand_builtin_memmove (exp, target);
9921 if (target)
9922 return target;
9923 break;
9924
9925 case BUILT_IN_MEMPCPY:
9926 target = expand_builtin_mempcpy (exp, target);
9927 if (target)
9928 return target;
9929 break;
9930
9931 case BUILT_IN_MEMSET:
9932 target = expand_builtin_memset (exp, target, mode);
9933 if (target)
9934 return target;
9935 break;
9936
9937 case BUILT_IN_BZERO:
9938 target = expand_builtin_bzero (exp);
9939 if (target)
9940 return target;
9941 break;
9942
9943 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9944 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
9945 when changing it to a strcmp call. */
9946 case BUILT_IN_STRCMP_EQ:
9947 target = expand_builtin_memcmp (exp, target, true);
9948 if (target)
9949 return target;
9950
9951 /* Change this call back to a BUILT_IN_STRCMP. */
9952 TREE_OPERAND (exp, 1)
9953 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9954
9955 /* Delete the last parameter. */
9956 unsigned int i;
9957 vec<tree, va_gc> *arg_vec;
9958 vec_alloc (arg_vec, 2);
9959 for (i = 0; i < 2; i++)
9960 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
9961 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
9962 /* FALLTHROUGH */
9963
9964 case BUILT_IN_STRCMP:
9965 target = expand_builtin_strcmp (exp, target);
9966 if (target)
9967 return target;
9968 break;
9969
9970 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9971 back to a BUILT_IN_STRNCMP. */
9972 case BUILT_IN_STRNCMP_EQ:
9973 target = expand_builtin_memcmp (exp, target, true);
9974 if (target)
9975 return target;
9976
9977 /* Change it back to a BUILT_IN_STRNCMP. */
9978 TREE_OPERAND (exp, 1)
9979 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
9980 /* FALLTHROUGH */
9981
9982 case BUILT_IN_STRNCMP:
9983 target = expand_builtin_strncmp (exp, target, mode);
9984 if (target)
9985 return target;
9986 break;
9987
9988 case BUILT_IN_BCMP:
9989 case BUILT_IN_MEMCMP:
9990 case BUILT_IN_MEMCMP_EQ:
9991 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
9992 if (target)
9993 return target;
9994 if (fcode == BUILT_IN_MEMCMP_EQ)
9995 {
9996 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
9997 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
9998 }
9999 break;
10000
10001 case BUILT_IN_SETJMP:
10002 /* This should have been lowered to the builtins below. */
10003 gcc_unreachable ();
10004
10005 case BUILT_IN_SETJMP_SETUP:
10006 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10007 and the receiver label. */
10008 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10009 {
10010 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10011 VOIDmode, EXPAND_NORMAL);
10012 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
10013 rtx_insn *label_r = label_rtx (label);
10014
10015 /* This is copied from the handling of non-local gotos. */
10016 expand_builtin_setjmp_setup (buf_addr, label_r);
10017 nonlocal_goto_handler_labels
10018 = gen_rtx_INSN_LIST (VOIDmode, label_r,
10019 nonlocal_goto_handler_labels);
10020 /* ??? Do not let expand_label treat us as such since we would
10021 not want to be both on the list of non-local labels and on
10022 the list of forced labels. */
10023 FORCED_LABEL (label) = 0;
10024 return const0_rtx;
10025 }
10026 break;
10027
10028 case BUILT_IN_SETJMP_RECEIVER:
10029 /* __builtin_setjmp_receiver is passed the receiver label. */
10030 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10031 {
10032 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
10033 rtx_insn *label_r = label_rtx (label);
10034
10035 expand_builtin_setjmp_receiver (label_r);
10036 return const0_rtx;
10037 }
10038 break;
10039
10040 /* __builtin_longjmp is passed a pointer to an array of five words.
10041 It's similar to the C library longjmp function but works with
10042 __builtin_setjmp above. */
10043 case BUILT_IN_LONGJMP:
10044 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10045 {
10046 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10047 VOIDmode, EXPAND_NORMAL);
10048 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
10049
10050 if (value != const1_rtx)
10051 {
10052 error ("%<__builtin_longjmp%> second argument must be 1");
10053 return const0_rtx;
10054 }
10055
10056 expand_builtin_longjmp (buf_addr, value);
10057 return const0_rtx;
10058 }
10059 break;
10060
10061 case BUILT_IN_NONLOCAL_GOTO:
10062 target = expand_builtin_nonlocal_goto (exp);
10063 if (target)
10064 return target;
10065 break;
10066
10067 /* This updates the setjmp buffer that is its argument with the value
10068 of the current stack pointer. */
10069 case BUILT_IN_UPDATE_SETJMP_BUF:
10070 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10071 {
10072 rtx buf_addr
10073 = expand_normal (CALL_EXPR_ARG (exp, 0));
10074
10075 expand_builtin_update_setjmp_buf (buf_addr);
10076 return const0_rtx;
10077 }
10078 break;
10079
10080 case BUILT_IN_TRAP:
10081 expand_builtin_trap ();
10082 return const0_rtx;
10083
10084 case BUILT_IN_UNREACHABLE:
10085 expand_builtin_unreachable ();
10086 return const0_rtx;
10087
10088 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10089 case BUILT_IN_SIGNBITD32:
10090 case BUILT_IN_SIGNBITD64:
10091 case BUILT_IN_SIGNBITD128:
10092 target = expand_builtin_signbit (exp, target);
10093 if (target)
10094 return target;
10095 break;
10096
10097 /* Various hooks for the DWARF 2 __throw routine. */
10098 case BUILT_IN_UNWIND_INIT:
10099 expand_builtin_unwind_init ();
10100 return const0_rtx;
10101 case BUILT_IN_DWARF_CFA:
10102 return virtual_cfa_rtx;
10103 #ifdef DWARF2_UNWIND_INFO
10104 case BUILT_IN_DWARF_SP_COLUMN:
10105 return expand_builtin_dwarf_sp_column ();
10106 case BUILT_IN_INIT_DWARF_REG_SIZES:
10107 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
10108 return const0_rtx;
10109 #endif
10110 case BUILT_IN_FROB_RETURN_ADDR:
10111 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
10112 case BUILT_IN_EXTRACT_RETURN_ADDR:
10113 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
10114 case BUILT_IN_EH_RETURN:
10115 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10116 CALL_EXPR_ARG (exp, 1));
10117 return const0_rtx;
10118 case BUILT_IN_EH_RETURN_DATA_REGNO:
10119 return expand_builtin_eh_return_data_regno (exp);
10120 case BUILT_IN_EXTEND_POINTER:
10121 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
10122 case BUILT_IN_EH_POINTER:
10123 return expand_builtin_eh_pointer (exp);
10124 case BUILT_IN_EH_FILTER:
10125 return expand_builtin_eh_filter (exp);
10126 case BUILT_IN_EH_COPY_VALUES:
10127 return expand_builtin_eh_copy_values (exp);
10128
10129 case BUILT_IN_VA_START:
10130 return expand_builtin_va_start (exp);
10131 case BUILT_IN_VA_END:
10132 return expand_builtin_va_end (exp);
10133 case BUILT_IN_VA_COPY:
10134 return expand_builtin_va_copy (exp);
10135 case BUILT_IN_EXPECT:
10136 return expand_builtin_expect (exp, target);
10137 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10138 return expand_builtin_expect_with_probability (exp, target);
10139 case BUILT_IN_ASSUME_ALIGNED:
10140 return expand_builtin_assume_aligned (exp, target);
10141 case BUILT_IN_PREFETCH:
10142 expand_builtin_prefetch (exp);
10143 return const0_rtx;
10144
10145 case BUILT_IN_INIT_TRAMPOLINE:
10146 return expand_builtin_init_trampoline (exp, true);
10147 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10148 return expand_builtin_init_trampoline (exp, false);
10149 case BUILT_IN_ADJUST_TRAMPOLINE:
10150 return expand_builtin_adjust_trampoline (exp);
10151
10152 case BUILT_IN_INIT_DESCRIPTOR:
10153 return expand_builtin_init_descriptor (exp);
10154 case BUILT_IN_ADJUST_DESCRIPTOR:
10155 return expand_builtin_adjust_descriptor (exp);
10156
10157 case BUILT_IN_FORK:
10158 case BUILT_IN_EXECL:
10159 case BUILT_IN_EXECV:
10160 case BUILT_IN_EXECLP:
10161 case BUILT_IN_EXECLE:
10162 case BUILT_IN_EXECVP:
10163 case BUILT_IN_EXECVE:
10164 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
10165 if (target)
10166 return target;
10167 break;
10168
10169 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10170 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10171 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10172 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10173 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10174 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
10175 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
10176 if (target)
10177 return target;
10178 break;
10179
10180 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10181 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10182 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10183 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10184 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10185 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
10186 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
10187 if (target)
10188 return target;
10189 break;
10190
10191 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10192 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10193 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10194 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10195 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10196 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
10197 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
10198 if (target)
10199 return target;
10200 break;
10201
10202 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10203 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10204 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10205 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10206 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10207 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
10208 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
10209 if (target)
10210 return target;
10211 break;
10212
10213 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10214 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10215 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10216 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10217 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10218 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
10219 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
10220 if (target)
10221 return target;
10222 break;
10223
10224 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10225 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10226 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10227 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10228 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
10229 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
10230 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
10231 if (target)
10232 return target;
10233 break;
10234
10235 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10236 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10237 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10238 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10239 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10240 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
10241 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
10242 if (target)
10243 return target;
10244 break;
10245
10246 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10247 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10248 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10249 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10250 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
10252 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
10253 if (target)
10254 return target;
10255 break;
10256
10257 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10258 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10259 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10260 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10261 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10262 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
10263 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
10264 if (target)
10265 return target;
10266 break;
10267
10268 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10269 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10270 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10271 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10272 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10273 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
10274 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
10275 if (target)
10276 return target;
10277 break;
10278
10279 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10280 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10281 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10282 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10283 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10284 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
10285 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
10286 if (target)
10287 return target;
10288 break;
10289
10290 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10291 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10292 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10293 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10294 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10295 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
10296 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
10297 if (target)
10298 return target;
10299 break;
10300
10301 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10302 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10303 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10304 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10305 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
10306 if (mode == VOIDmode)
10307 mode = TYPE_MODE (boolean_type_node);
10308 if (!target || !register_operand (target, mode))
10309 target = gen_reg_rtx (mode);
10310
10311 mode = get_builtin_sync_mode
10312 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
10313 target = expand_builtin_compare_and_swap (mode, exp, true, target);
10314 if (target)
10315 return target;
10316 break;
10317
10318 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10319 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10320 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10321 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10322 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10323 mode = get_builtin_sync_mode
10324 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
10325 target = expand_builtin_compare_and_swap (mode, exp, false, target);
10326 if (target)
10327 return target;
10328 break;
10329
10330 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10331 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10332 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10333 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10334 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10335 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10336 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
10337 if (target)
10338 return target;
10339 break;
10340
10341 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10342 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10343 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10344 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10345 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10346 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10347 expand_builtin_sync_lock_release (mode, exp);
10348 return const0_rtx;
10349
10350 case BUILT_IN_SYNC_SYNCHRONIZE:
10351 expand_builtin_sync_synchronize ();
10352 return const0_rtx;
10353
10354 case BUILT_IN_ATOMIC_EXCHANGE_1:
10355 case BUILT_IN_ATOMIC_EXCHANGE_2:
10356 case BUILT_IN_ATOMIC_EXCHANGE_4:
10357 case BUILT_IN_ATOMIC_EXCHANGE_8:
10358 case BUILT_IN_ATOMIC_EXCHANGE_16:
10359 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10360 target = expand_builtin_atomic_exchange (mode, exp, target);
10361 if (target)
10362 return target;
10363 break;
10364
10365 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10366 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10367 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10368 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10369 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
10370 {
10371 unsigned int nargs, z;
10372 vec<tree, va_gc> *vec;
10373
10374 mode =
10375 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10376 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10377 if (target)
10378 return target;
10379
10380 /* If this is turned into an external library call, the weak parameter
10381 must be dropped to match the expected parameter list. */
10382 nargs = call_expr_nargs (exp);
10383 vec_alloc (vec, nargs - 1);
10384 for (z = 0; z < 3; z++)
10385 vec->quick_push (CALL_EXPR_ARG (exp, z));
10386 /* Skip the boolean weak parameter. */
10387 for (z = 4; z < 6; z++)
10388 vec->quick_push (CALL_EXPR_ARG (exp, z));
10389 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10390 break;
10391 }
10392
10393 case BUILT_IN_ATOMIC_LOAD_1:
10394 case BUILT_IN_ATOMIC_LOAD_2:
10395 case BUILT_IN_ATOMIC_LOAD_4:
10396 case BUILT_IN_ATOMIC_LOAD_8:
10397 case BUILT_IN_ATOMIC_LOAD_16:
10398 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10399 target = expand_builtin_atomic_load (mode, exp, target);
10400 if (target)
10401 return target;
10402 break;
10403
10404 case BUILT_IN_ATOMIC_STORE_1:
10405 case BUILT_IN_ATOMIC_STORE_2:
10406 case BUILT_IN_ATOMIC_STORE_4:
10407 case BUILT_IN_ATOMIC_STORE_8:
10408 case BUILT_IN_ATOMIC_STORE_16:
10409 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10410 target = expand_builtin_atomic_store (mode, exp);
10411 if (target)
10412 return const0_rtx;
10413 break;
10414
10415 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10416 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10417 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10418 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10419 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10420 {
10421 enum built_in_function lib;
10422 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10423 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10424 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10425 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10426 ignore, lib);
10427 if (target)
10428 return target;
10429 break;
10430 }
10431 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10432 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10433 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10434 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10435 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10436 {
10437 enum built_in_function lib;
10438 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10439 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10440 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10441 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10442 ignore, lib);
10443 if (target)
10444 return target;
10445 break;
10446 }
10447 case BUILT_IN_ATOMIC_AND_FETCH_1:
10448 case BUILT_IN_ATOMIC_AND_FETCH_2:
10449 case BUILT_IN_ATOMIC_AND_FETCH_4:
10450 case BUILT_IN_ATOMIC_AND_FETCH_8:
10451 case BUILT_IN_ATOMIC_AND_FETCH_16:
10452 {
10453 enum built_in_function lib;
10454 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10455 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10456 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10457 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10458 ignore, lib);
10459 if (target)
10460 return target;
10461 break;
10462 }
10463 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10464 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10465 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10466 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10467 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10468 {
10469 enum built_in_function lib;
10470 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10471 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10472 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10473 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10474 ignore, lib);
10475 if (target)
10476 return target;
10477 break;
10478 }
10479 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10480 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10481 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10482 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10483 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10484 {
10485 enum built_in_function lib;
10486 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10487 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10488 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10489 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10490 ignore, lib);
10491 if (target)
10492 return target;
10493 break;
10494 }
10495 case BUILT_IN_ATOMIC_OR_FETCH_1:
10496 case BUILT_IN_ATOMIC_OR_FETCH_2:
10497 case BUILT_IN_ATOMIC_OR_FETCH_4:
10498 case BUILT_IN_ATOMIC_OR_FETCH_8:
10499 case BUILT_IN_ATOMIC_OR_FETCH_16:
10500 {
10501 enum built_in_function lib;
10502 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10503 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10504 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10505 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10506 ignore, lib);
10507 if (target)
10508 return target;
10509 break;
10510 }
10511 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10512 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10513 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10514 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10515 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10516 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10517 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10518 ignore, BUILT_IN_NONE);
10519 if (target)
10520 return target;
10521 break;
10522
10523 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10524 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10525 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10526 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10527 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10528 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10529 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10530 ignore, BUILT_IN_NONE);
10531 if (target)
10532 return target;
10533 break;
10534
10535 case BUILT_IN_ATOMIC_FETCH_AND_1:
10536 case BUILT_IN_ATOMIC_FETCH_AND_2:
10537 case BUILT_IN_ATOMIC_FETCH_AND_4:
10538 case BUILT_IN_ATOMIC_FETCH_AND_8:
10539 case BUILT_IN_ATOMIC_FETCH_AND_16:
10540 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10541 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10542 ignore, BUILT_IN_NONE);
10543 if (target)
10544 return target;
10545 break;
10546
10547 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10548 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10549 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10550 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10551 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10552 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10553 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10554 ignore, BUILT_IN_NONE);
10555 if (target)
10556 return target;
10557 break;
10558
10559 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10560 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10561 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10562 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10563 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10564 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10565 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10566 ignore, BUILT_IN_NONE);
10567 if (target)
10568 return target;
10569 break;
10570
10571 case BUILT_IN_ATOMIC_FETCH_OR_1:
10572 case BUILT_IN_ATOMIC_FETCH_OR_2:
10573 case BUILT_IN_ATOMIC_FETCH_OR_4:
10574 case BUILT_IN_ATOMIC_FETCH_OR_8:
10575 case BUILT_IN_ATOMIC_FETCH_OR_16:
10576 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10577 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10578 ignore, BUILT_IN_NONE);
10579 if (target)
10580 return target;
10581 break;
10582
10583 case BUILT_IN_ATOMIC_TEST_AND_SET:
10584 return expand_builtin_atomic_test_and_set (exp, target);
10585
10586 case BUILT_IN_ATOMIC_CLEAR:
10587 return expand_builtin_atomic_clear (exp);
10588
10589 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10590 return expand_builtin_atomic_always_lock_free (exp);
10591
10592 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10593 target = expand_builtin_atomic_is_lock_free (exp);
10594 if (target)
10595 return target;
10596 break;
10597
10598 case BUILT_IN_ATOMIC_THREAD_FENCE:
10599 expand_builtin_atomic_thread_fence (exp);
10600 return const0_rtx;
10601
10602 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10603 expand_builtin_atomic_signal_fence (exp);
10604 return const0_rtx;
10605
10606 case BUILT_IN_OBJECT_SIZE:
10607 return expand_builtin_object_size (exp);
10608
10609 case BUILT_IN_MEMCPY_CHK:
10610 case BUILT_IN_MEMPCPY_CHK:
10611 case BUILT_IN_MEMMOVE_CHK:
10612 case BUILT_IN_MEMSET_CHK:
10613 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10614 if (target)
10615 return target;
10616 break;
10617
10618 case BUILT_IN_STRCPY_CHK:
10619 case BUILT_IN_STPCPY_CHK:
10620 case BUILT_IN_STRNCPY_CHK:
10621 case BUILT_IN_STPNCPY_CHK:
10622 case BUILT_IN_STRCAT_CHK:
10623 case BUILT_IN_STRNCAT_CHK:
10624 case BUILT_IN_SNPRINTF_CHK:
10625 case BUILT_IN_VSNPRINTF_CHK:
10626 maybe_emit_chk_warning (exp, fcode);
10627 break;
10628
10629 case BUILT_IN_SPRINTF_CHK:
10630 case BUILT_IN_VSPRINTF_CHK:
10631 maybe_emit_sprintf_chk_warning (exp, fcode);
10632 break;
10633
10634 case BUILT_IN_THREAD_POINTER:
10635 return expand_builtin_thread_pointer (exp, target);
10636
10637 case BUILT_IN_SET_THREAD_POINTER:
10638 expand_builtin_set_thread_pointer (exp);
10639 return const0_rtx;
10640
10641 case BUILT_IN_ACC_ON_DEVICE:
10642 /* Do library call, if we failed to expand the builtin when
10643 folding. */
10644 break;
10645
10646 case BUILT_IN_GOACC_PARLEVEL_ID:
10647 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10648 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10649
10650 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10651 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10652
10653 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10654 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10655 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10656 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10657 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10658 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10659 return expand_speculation_safe_value (mode, exp, target, ignore);
10660
10661 default: /* just do library call, if unknown builtin */
10662 break;
10663 }
10664
10665 /* The switch statement above can drop through to cause the function
10666 to be called normally. */
10667 return expand_call (exp, target, ignore);
10668 }
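
/* Illustrative example (not part of the original file): the switch above
   distinguishes the "fetch-and-op" builtins, which return the value the
   object held before the operation, from the "op-and-fetch" builtins,
   which return the updated value (the true/false flag passed to the
   expanders above).  A minimal sketch at the source level; the function
   name is hypothetical.  */

int
example_fetch_variants (int *p)
{
  *p = 10;
  int before = __atomic_fetch_add (p, 5, __ATOMIC_SEQ_CST);  /* before == 10, *p == 15 */
  int after = __atomic_add_fetch (p, 5, __ATOMIC_SEQ_CST);   /* after == 20, *p == 20 */
  return before + after;                                     /* 30 */
}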
10669
10670 /* Determine whether a tree node represents a call to a built-in
10671 function. If the tree T is a call to a built-in function with
10672 the right number of arguments of the appropriate types, return
10673 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10674 Otherwise the return value is END_BUILTINS. */
10675
10676 enum built_in_function
10677 builtin_mathfn_code (const_tree t)
10678 {
10679 const_tree fndecl, arg, parmlist;
10680 const_tree argtype, parmtype;
10681 const_call_expr_arg_iterator iter;
10682
10683 if (TREE_CODE (t) != CALL_EXPR)
10684 return END_BUILTINS;
10685
10686 fndecl = get_callee_fndecl (t);
10687 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10688 return END_BUILTINS;
10689
10690 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
10691 init_const_call_expr_arg_iterator (t, &iter);
10692 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
10693 {
10694 /* If a function doesn't take a variable number of arguments,
10695 the last element in the list will have type `void'. */
10696 parmtype = TREE_VALUE (parmlist);
10697 if (VOID_TYPE_P (parmtype))
10698 {
10699 if (more_const_call_expr_args_p (&iter))
10700 return END_BUILTINS;
10701 return DECL_FUNCTION_CODE (fndecl);
10702 }
10703
10704 if (! more_const_call_expr_args_p (&iter))
10705 return END_BUILTINS;
10706
10707 arg = next_const_call_expr_arg (&iter);
10708 argtype = TREE_TYPE (arg);
10709
10710 if (SCALAR_FLOAT_TYPE_P (parmtype))
10711 {
10712 if (! SCALAR_FLOAT_TYPE_P (argtype))
10713 return END_BUILTINS;
10714 }
10715 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10716 {
10717 if (! COMPLEX_FLOAT_TYPE_P (argtype))
10718 return END_BUILTINS;
10719 }
10720 else if (POINTER_TYPE_P (parmtype))
10721 {
10722 if (! POINTER_TYPE_P (argtype))
10723 return END_BUILTINS;
10724 }
10725 else if (INTEGRAL_TYPE_P (parmtype))
10726 {
10727 if (! INTEGRAL_TYPE_P (argtype))
10728 return END_BUILTINS;
10729 }
10730 else
10731 return END_BUILTINS;
10732 }
10733
10734 /* Variable-length argument list. */
10735 return DECL_FUNCTION_CODE (fndecl);
10736 }
10737
10738 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
10739 evaluate to a constant. */
10740
10741 static tree
10742 fold_builtin_constant_p (tree arg)
10743 {
10744 /* We return 1 for a numeric type that's known to be a constant
10745 value at compile-time or for an aggregate type that's a
10746 literal constant. */
10747 STRIP_NOPS (arg);
10748
10749   /* If we know this is a constant, return the constant one.  */
10750 if (CONSTANT_CLASS_P (arg)
10751 || (TREE_CODE (arg) == CONSTRUCTOR
10752 && TREE_CONSTANT (arg)))
10753 return integer_one_node;
10754 if (TREE_CODE (arg) == ADDR_EXPR)
10755 {
10756 tree op = TREE_OPERAND (arg, 0);
10757 if (TREE_CODE (op) == STRING_CST
10758 || (TREE_CODE (op) == ARRAY_REF
10759 && integer_zerop (TREE_OPERAND (op, 1))
10760 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10761 return integer_one_node;
10762 }
10763
10764 /* If this expression has side effects, show we don't know it to be a
10765 constant. Likewise if it's a pointer or aggregate type since in
10766 those case we only want literals, since those are only optimized
10767      those cases we only want literals, since those are only optimized
10768 And finally, if we are compiling an initializer, not code, we
10769 need to return a definite result now; there's not going to be any
10770 more optimization done. */
10771 if (TREE_SIDE_EFFECTS (arg)
10772 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10773 || POINTER_TYPE_P (TREE_TYPE (arg))
10774 || cfun == 0
10775 || folding_initializer
10776 || force_folding_builtin_constant_p)
10777 return integer_zero_node;
10778
10779 return NULL_TREE;
10780 }
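
/* Illustrative example (not part of the original file): what the folding
   above means at the source level.  Constants and addresses of string
   literals fold to 1; expressions with side effects, pointers and
   aggregates fold to 0; anything else is left for later passes (and
   becomes 0 at -O0, see fold_builtin_1 below).  The function name is
   hypothetical.  */

int
example_constant_p (int x)
{
  int a = __builtin_constant_p (42);     /* folded to 1 */
  int b = __builtin_constant_p ("abc");  /* folded to 1: address of a string literal */
  int c = __builtin_constant_p (x);      /* not a known constant here; 0 unless
                                            later optimization proves otherwise */
  return a + b + c;
}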
10781
10782 /* Create builtin_expect or builtin_expect_with_probability
10783 with PRED and EXPECTED as its arguments and return it as a truthvalue.
10784    The Fortran FE can also produce builtin_expect with PREDICTOR as a third
10785    argument; builtin_expect_with_probability instead uses the third argument
10786    as the PROBABILITY value.  */
10787
10788 static tree
10789 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10790 tree predictor, tree probability)
10791 {
10792 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10793
10794 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10795 : BUILT_IN_EXPECT_WITH_PROBABILITY);
10796 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10797 ret_type = TREE_TYPE (TREE_TYPE (fn));
10798 pred_type = TREE_VALUE (arg_types);
10799 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10800
10801 pred = fold_convert_loc (loc, pred_type, pred);
10802 expected = fold_convert_loc (loc, expected_type, expected);
10803
10804 if (probability)
10805 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10806 else
10807 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10808 predictor);
10809
10810 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10811 build_int_cst (ret_type, 0));
10812 }
10813
10814 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
10815 NULL_TREE if no simplification is possible. */
10816
10817 tree
10818 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10819 tree arg3)
10820 {
10821 tree inner, fndecl, inner_arg0;
10822 enum tree_code code;
10823
10824 /* Distribute the expected value over short-circuiting operators.
10825 See through the cast from truthvalue_type_node to long. */
10826 inner_arg0 = arg0;
10827 while (CONVERT_EXPR_P (inner_arg0)
10828 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10829 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10830 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10831
10832   /* If this is a builtin_expect within a builtin_expect, keep the
10833      inner one.  See through a comparison against a constant.  It
10834      might have been added to create a truthvalue.  */
10835 inner = inner_arg0;
10836
10837 if (COMPARISON_CLASS_P (inner)
10838 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10839 inner = TREE_OPERAND (inner, 0);
10840
10841 if (TREE_CODE (inner) == CALL_EXPR
10842 && (fndecl = get_callee_fndecl (inner))
10843 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10844 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10845 return arg0;
10846
10847 inner = inner_arg0;
10848 code = TREE_CODE (inner);
10849 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10850 {
10851 tree op0 = TREE_OPERAND (inner, 0);
10852 tree op1 = TREE_OPERAND (inner, 1);
10853 arg1 = save_expr (arg1);
10854
10855 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10856 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10857 inner = build2 (code, TREE_TYPE (inner), op0, op1);
10858
10859 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10860 }
10861
10862 /* If the argument isn't invariant then there's nothing else we can do. */
10863 if (!TREE_CONSTANT (inner_arg0))
10864 return NULL_TREE;
10865
10866 /* If we expect that a comparison against the argument will fold to
10867 a constant return the constant. In practice, this means a true
10868 constant or the address of a non-weak symbol. */
10869 inner = inner_arg0;
10870 STRIP_NOPS (inner);
10871 if (TREE_CODE (inner) == ADDR_EXPR)
10872 {
10873 do
10874 {
10875 inner = TREE_OPERAND (inner, 0);
10876 }
10877 while (TREE_CODE (inner) == COMPONENT_REF
10878 || TREE_CODE (inner) == ARRAY_REF);
10879 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10880 return NULL_TREE;
10881 }
10882
10883 /* Otherwise, ARG0 already has the proper type for the return value. */
10884 return arg0;
10885 }
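
/* Illustrative example (not part of the original file): the distribution
   performed above.  An expectation attached to a short-circuit condition
   such as the one below is rewritten so that each operand carries its own
   expectation, roughly  __builtin_expect (a, 1) && __builtin_expect (b, 1),
   converted back to the original type.  The function name is
   hypothetical.  */

int
example_expect (int a, int b)
{
  if (__builtin_expect (a && b, 1))
    return 1;
  return 0;
}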
10886
10887 /* Fold a call to __builtin_classify_type with argument ARG. */
10888
10889 static tree
10890 fold_builtin_classify_type (tree arg)
10891 {
10892 if (arg == 0)
10893 return build_int_cst (integer_type_node, no_type_class);
10894
10895 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10896 }
10897
10898 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10899 ARG. */
10900
10901 static tree
10902 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10903 {
10904 if (!validate_arg (arg, POINTER_TYPE))
10905 return NULL_TREE;
10906 else
10907 {
10908 c_strlen_data lendata = { };
10909 tree len = c_strlen (arg, 0, &lendata);
10910
10911 if (len)
10912 return fold_convert_loc (loc, type, len);
10913
10914 if (!lendata.decl)
10915 c_strlen (arg, 1, &lendata);
10916
10917 if (lendata.decl)
10918 {
10919 if (EXPR_HAS_LOCATION (arg))
10920 loc = EXPR_LOCATION (arg);
10921 else if (loc == UNKNOWN_LOCATION)
10922 loc = input_location;
10923 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
10924 }
10925
10926 return NULL_TREE;
10927 }
10928 }
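
/* Illustrative example (not part of the original file): when c_strlen can
   determine the length of a constant string, the call folds to an integer
   constant; when the argument is a constant character array without a
   terminating nul, the code above instead warns about it.  The function
   name is hypothetical.  */

__SIZE_TYPE__
example_strlen (void)
{
  return __builtin_strlen ("builtin");  /* folds to the constant 7 */
}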
10929
10930 /* Fold a call to __builtin_inf or __builtin_huge_val. */
10931
10932 static tree
10933 fold_builtin_inf (location_t loc, tree type, int warn)
10934 {
10935 REAL_VALUE_TYPE real;
10936
10937 /* __builtin_inff is intended to be usable to define INFINITY on all
10938 targets. If an infinity is not available, INFINITY expands "to a
10939 positive constant of type float that overflows at translation
10940 time", footnote "In this case, using INFINITY will violate the
10941 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10942 Thus we pedwarn to ensure this constraint violation is
10943 diagnosed. */
10944 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
10945 pedwarn (loc, 0, "target format does not support infinity");
10946
10947 real_inf (&real);
10948 return build_real (type, real);
10949 }
10950
10951 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
10952 NULL_TREE if no simplification can be made. */
10953
10954 static tree
10955 fold_builtin_sincos (location_t loc,
10956 tree arg0, tree arg1, tree arg2)
10957 {
10958 tree type;
10959 tree fndecl, call = NULL_TREE;
10960
10961 if (!validate_arg (arg0, REAL_TYPE)
10962 || !validate_arg (arg1, POINTER_TYPE)
10963 || !validate_arg (arg2, POINTER_TYPE))
10964 return NULL_TREE;
10965
10966 type = TREE_TYPE (arg0);
10967
10968 /* Calculate the result when the argument is a constant. */
10969 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
10970 if (fn == END_BUILTINS)
10971 return NULL_TREE;
10972
10973 /* Canonicalize sincos to cexpi. */
10974 if (TREE_CODE (arg0) == REAL_CST)
10975 {
10976 tree complex_type = build_complex_type (type);
10977 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
10978 }
10979 if (!call)
10980 {
10981 if (!targetm.libc_has_function (function_c99_math_complex, type)
10982 || !builtin_decl_implicit_p (fn))
10983 return NULL_TREE;
10984 fndecl = builtin_decl_explicit (fn);
10985 call = build_call_expr_loc (loc, fndecl, 1, arg0);
10986 call = builtin_save_expr (call);
10987 }
10988
10989 tree ptype = build_pointer_type (type);
10990 arg1 = fold_convert (ptype, arg1);
10991 arg2 = fold_convert (ptype, arg2);
10992 return build2 (COMPOUND_EXPR, void_type_node,
10993 build2 (MODIFY_EXPR, void_type_node,
10994 build_fold_indirect_ref_loc (loc, arg1),
10995 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
10996 build2 (MODIFY_EXPR, void_type_node,
10997 build_fold_indirect_ref_loc (loc, arg2),
10998 fold_build1_loc (loc, REALPART_EXPR, type, call)));
10999 }
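
/* Illustrative example (not part of the original file): the rewrite above
   relies on the identity cexpi(x) = cos(x) + i*sin(x), so a single cexpi
   call provides both results, whose imaginary and real parts are then
   stored through the two pointer arguments.  At the source level the
   builtin behaves as below; the function name is hypothetical.  */

void
example_sincos (double x, double *s, double *c)
{
  __builtin_sincos (x, s, c);  /* *s = sin (x), *c = cos (x) */
}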
11000
11001 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
11002 Return NULL_TREE if no simplification can be made. */
11003
11004 static tree
11005 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11006 {
11007 if (!validate_arg (arg1, POINTER_TYPE)
11008 || !validate_arg (arg2, POINTER_TYPE)
11009 || !validate_arg (len, INTEGER_TYPE))
11010 return NULL_TREE;
11011
11012 /* If the LEN parameter is zero, return zero. */
11013 if (integer_zerop (len))
11014 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11015 arg1, arg2);
11016
11017 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11018 if (operand_equal_p (arg1, arg2, 0))
11019 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11020
11021   /* If the LEN parameter is one, return an expression corresponding to
11022      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
11023 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11024 {
11025 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11026 tree cst_uchar_ptr_node
11027 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11028
11029 tree ind1
11030 = fold_convert_loc (loc, integer_type_node,
11031 build1 (INDIRECT_REF, cst_uchar_node,
11032 fold_convert_loc (loc,
11033 cst_uchar_ptr_node,
11034 arg1)));
11035 tree ind2
11036 = fold_convert_loc (loc, integer_type_node,
11037 build1 (INDIRECT_REF, cst_uchar_node,
11038 fold_convert_loc (loc,
11039 cst_uchar_ptr_node,
11040 arg2)));
11041 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11042 }
11043
11044 return NULL_TREE;
11045 }
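
/* Illustrative example (not part of the original file): the memcmp folds
   above at the source level.  The function name is hypothetical.  */

int
example_memcmp (const void *p, const void *q)
{
  int a = __builtin_memcmp (p, q, 0);   /* LEN == 0: folds to 0 */
  int b = __builtin_memcmp (p, p, 16);  /* identical operands: folds to 0 */
  int c = __builtin_memcmp (p, q, 1);   /* LEN == 1: folds to
                                           *(const unsigned char *) p
                                           - *(const unsigned char *) q */
  return a + b + c;
}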
11046
11047 /* Fold a call to builtin isascii with argument ARG. */
11048
11049 static tree
11050 fold_builtin_isascii (location_t loc, tree arg)
11051 {
11052 if (!validate_arg (arg, INTEGER_TYPE))
11053 return NULL_TREE;
11054 else
11055 {
11056 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
11057 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11058 build_int_cst (integer_type_node,
11059 ~ (unsigned HOST_WIDE_INT) 0x7f));
11060 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11061 arg, integer_zero_node);
11062 }
11063 }
11064
11065 /* Fold a call to builtin toascii with argument ARG. */
11066
11067 static tree
11068 fold_builtin_toascii (location_t loc, tree arg)
11069 {
11070 if (!validate_arg (arg, INTEGER_TYPE))
11071 return NULL_TREE;
11072
11073 /* Transform toascii(c) -> (c & 0x7f). */
11074 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11075 build_int_cst (integer_type_node, 0x7f));
11076 }
11077
11078 /* Fold a call to builtin isdigit with argument ARG. */
11079
11080 static tree
11081 fold_builtin_isdigit (location_t loc, tree arg)
11082 {
11083 if (!validate_arg (arg, INTEGER_TYPE))
11084 return NULL_TREE;
11085 else
11086 {
11087 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
11088 /* According to the C standard, isdigit is unaffected by locale.
11089 However, it definitely is affected by the target character set. */
11090 unsigned HOST_WIDE_INT target_digit0
11091 = lang_hooks.to_target_charset ('0');
11092
11093 if (target_digit0 == 0)
11094 return NULL_TREE;
11095
11096 arg = fold_convert_loc (loc, unsigned_type_node, arg);
11097 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11098 build_int_cst (unsigned_type_node, target_digit0));
11099 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11100 build_int_cst (unsigned_type_node, 9));
11101 }
11102 }
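
/* Illustrative example (not part of the original file): the character
   classification folds above, written out at the source level.  The
   function name is hypothetical.  */

int
example_ctype (int c)
{
  int a = __builtin_isascii (c);  /* folds to ((c & ~0x7f) == 0) */
  int t = __builtin_toascii (c);  /* folds to (c & 0x7f) */
  int d = __builtin_isdigit (c);  /* folds to (unsigned) (c) - '0' <= 9 for the
                                     usual case where the target's digits are
                                     contiguous and '0' is nonzero */
  return a + t + d;
}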
11103
11104 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
11105
11106 static tree
11107 fold_builtin_fabs (location_t loc, tree arg, tree type)
11108 {
11109 if (!validate_arg (arg, REAL_TYPE))
11110 return NULL_TREE;
11111
11112 arg = fold_convert_loc (loc, type, arg);
11113 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11114 }
11115
11116 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
11117
11118 static tree
11119 fold_builtin_abs (location_t loc, tree arg, tree type)
11120 {
11121 if (!validate_arg (arg, INTEGER_TYPE))
11122 return NULL_TREE;
11123
11124 arg = fold_convert_loc (loc, type, arg);
11125 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11126 }
11127
11128 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11129
11130 static tree
11131 fold_builtin_carg (location_t loc, tree arg, tree type)
11132 {
11133 if (validate_arg (arg, COMPLEX_TYPE)
11134 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11135 {
11136 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11137
11138 if (atan2_fn)
11139 {
11140 tree new_arg = builtin_save_expr (arg);
11141 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11142 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11143 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11144 }
11145 }
11146
11147 return NULL_TREE;
11148 }
11149
11150 /* Fold a call to builtin frexp; we can assume the base is 2.  */
11151
11152 static tree
11153 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11154 {
11155 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11156 return NULL_TREE;
11157
11158 STRIP_NOPS (arg0);
11159
11160 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11161 return NULL_TREE;
11162
11163 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11164
11165 /* Proceed if a valid pointer type was passed in. */
11166 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11167 {
11168 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11169 tree frac, exp;
11170
11171 switch (value->cl)
11172 {
11173 case rvc_zero:
11174 /* For +-0, return (*exp = 0, +-0). */
11175 exp = integer_zero_node;
11176 frac = arg0;
11177 break;
11178 case rvc_nan:
11179 case rvc_inf:
11180 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
11181 return omit_one_operand_loc (loc, rettype, arg0, arg1);
11182 case rvc_normal:
11183 {
11184 /* Since the frexp function always expects base 2, and in
11185 GCC normalized significands are already in the range
11186 [0.5, 1.0), we have exactly what frexp wants. */
11187 REAL_VALUE_TYPE frac_rvt = *value;
11188 SET_REAL_EXP (&frac_rvt, 0);
11189 frac = build_real (rettype, frac_rvt);
11190 exp = build_int_cst (integer_type_node, REAL_EXP (value));
11191 }
11192 break;
11193 default:
11194 gcc_unreachable ();
11195 }
11196
11197       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
11198 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11199 TREE_SIDE_EFFECTS (arg1) = 1;
11200 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11201 }
11202
11203 return NULL_TREE;
11204 }
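
/* Illustrative example (not part of the original file): the constant fold
   above.  frexp decomposes its argument into frac * 2**exp with frac in
   [0.5, 1.0) for normal values.  The function name is hypothetical.  */

double
example_frexp (int *e)
{
  return __builtin_frexp (8.0, e);  /* folds to (*e = 4, 0.5), since 8.0 == 0.5 * 2**4 */
}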
11205
11206 /* Fold a call to builtin modf. */
11207
11208 static tree
11209 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11210 {
11211 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11212 return NULL_TREE;
11213
11214 STRIP_NOPS (arg0);
11215
11216 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11217 return NULL_TREE;
11218
11219 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11220
11221 /* Proceed if a valid pointer type was passed in. */
11222 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11223 {
11224 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11225 REAL_VALUE_TYPE trunc, frac;
11226
11227 switch (value->cl)
11228 {
11229 case rvc_nan:
11230 case rvc_zero:
11231 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
11232 trunc = frac = *value;
11233 break;
11234 case rvc_inf:
11235 /* For +-Inf, return (*arg1 = arg0, +-0). */
11236 frac = dconst0;
11237 frac.sign = value->sign;
11238 trunc = *value;
11239 break;
11240 case rvc_normal:
11241 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
11242 real_trunc (&trunc, VOIDmode, value);
11243 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11244 /* If the original number was negative and already
11245 integral, then the fractional part is -0.0. */
11246 if (value->sign && frac.cl == rvc_zero)
11247 frac.sign = value->sign;
11248 break;
11249 }
11250
11251 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11252 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11253 build_real (rettype, trunc));
11254 TREE_SIDE_EFFECTS (arg1) = 1;
11255 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11256 build_real (rettype, frac));
11257 }
11258
11259 return NULL_TREE;
11260 }
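
/* Illustrative example (not part of the original file): the constant fold
   above, including the -0.0 fractional part for a negative value that is
   already integral.  The function name is hypothetical.  */

double
example_modf (double *ip)
{
  double f1 = __builtin_modf (2.5, ip);   /* folds to (*ip = 2.0, 0.5) */
  double f2 = __builtin_modf (-3.0, ip);  /* folds to (*ip = -3.0, -0.0) */
  return f1 + f2;
}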
11261
11262 /* Given a location LOC, an interclass builtin function decl FNDECL
11263    and its single argument ARG, return a folded expression computing
11264    the same, or NULL_TREE if we either couldn't or didn't want to fold
11265    (the latter happens if there's an RTL instruction available).  */
11266
11267 static tree
11268 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11269 {
11270 machine_mode mode;
11271
11272 if (!validate_arg (arg, REAL_TYPE))
11273 return NULL_TREE;
11274
11275 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11276 return NULL_TREE;
11277
11278 mode = TYPE_MODE (TREE_TYPE (arg));
11279
11280 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11281
11282 /* If there is no optab, try generic code. */
11283 switch (DECL_FUNCTION_CODE (fndecl))
11284 {
11285 tree result;
11286
11287 CASE_FLT_FN (BUILT_IN_ISINF):
11288 {
11289 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
11290 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11291 tree type = TREE_TYPE (arg);
11292 REAL_VALUE_TYPE r;
11293 char buf[128];
11294
11295 if (is_ibm_extended)
11296 {
11297 /* NaN and Inf are encoded in the high-order double value
11298 only. The low-order value is not significant. */
11299 type = double_type_node;
11300 mode = DFmode;
11301 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11302 }
11303 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11304 real_from_string (&r, buf);
11305 result = build_call_expr (isgr_fn, 2,
11306 fold_build1_loc (loc, ABS_EXPR, type, arg),
11307 build_real (type, r));
11308 return result;
11309 }
11310 CASE_FLT_FN (BUILT_IN_FINITE):
11311 case BUILT_IN_ISFINITE:
11312 {
11313 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
11314 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11315 tree type = TREE_TYPE (arg);
11316 REAL_VALUE_TYPE r;
11317 char buf[128];
11318
11319 if (is_ibm_extended)
11320 {
11321 /* NaN and Inf are encoded in the high-order double value
11322 only. The low-order value is not significant. */
11323 type = double_type_node;
11324 mode = DFmode;
11325 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11326 }
11327 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11328 real_from_string (&r, buf);
11329 result = build_call_expr (isle_fn, 2,
11330 fold_build1_loc (loc, ABS_EXPR, type, arg),
11331 build_real (type, r));
11332 /*result = fold_build2_loc (loc, UNGT_EXPR,
11333 TREE_TYPE (TREE_TYPE (fndecl)),
11334 fold_build1_loc (loc, ABS_EXPR, type, arg),
11335 build_real (type, r));
11336 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11337 TREE_TYPE (TREE_TYPE (fndecl)),
11338 result);*/
11339 return result;
11340 }
11341 case BUILT_IN_ISNORMAL:
11342 {
11343 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11344 islessequal(fabs(x),DBL_MAX). */
11345 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11346 tree type = TREE_TYPE (arg);
11347 tree orig_arg, max_exp, min_exp;
11348 machine_mode orig_mode = mode;
11349 REAL_VALUE_TYPE rmax, rmin;
11350 char buf[128];
11351
11352 orig_arg = arg = builtin_save_expr (arg);
11353 if (is_ibm_extended)
11354 {
11355 /* Use double to test the normal range of IBM extended
11356 precision. Emin for IBM extended precision is
11357 different to emin for IEEE double, being 53 higher
11358 since the low double exponent is at least 53 lower
11359 than the high double exponent. */
11360 type = double_type_node;
11361 mode = DFmode;
11362 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11363 }
11364 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11365
11366 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11367 real_from_string (&rmax, buf);
11368 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11369 real_from_string (&rmin, buf);
11370 max_exp = build_real (type, rmax);
11371 min_exp = build_real (type, rmin);
11372
11373 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11374 if (is_ibm_extended)
11375 {
11376 /* Testing the high end of the range is done just using
11377 the high double, using the same test as isfinite().
11378 For the subnormal end of the range we first test the
11379 high double, then if its magnitude is equal to the
11380 limit of 0x1p-969, we test whether the low double is
11381 non-zero and opposite sign to the high double. */
11382 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11383 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11384 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11385 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11386 arg, min_exp);
11387 tree as_complex = build1 (VIEW_CONVERT_EXPR,
11388 complex_double_type_node, orig_arg);
11389 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11390 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11391 tree zero = build_real (type, dconst0);
11392 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11393 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11394 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11395 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11396 fold_build3 (COND_EXPR,
11397 integer_type_node,
11398 hilt, logt, lolt));
11399 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11400 eq_min, ok_lo);
11401 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11402 gt_min, eq_min);
11403 }
11404 else
11405 {
11406 tree const isge_fn
11407 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11408 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11409 }
11410 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11411 max_exp, min_exp);
11412 return result;
11413 }
11414 default:
11415 break;
11416 }
11417
11418 return NULL_TREE;
11419 }
11420
11421 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11422    ARG is the argument for the call, BUILTIN_INDEX the classification.  */
11423
11424 static tree
11425 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11426 {
11427 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11428
11429 if (!validate_arg (arg, REAL_TYPE))
11430 return NULL_TREE;
11431
11432 switch (builtin_index)
11433 {
11434 case BUILT_IN_ISINF:
11435 if (tree_expr_infinite_p (arg))
11436 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11437 if (!tree_expr_maybe_infinite_p (arg))
11438 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11439 return NULL_TREE;
11440
11441 case BUILT_IN_ISINF_SIGN:
11442 {
11443 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11444 /* In a boolean context, GCC will fold the inner COND_EXPR to
11445 1. So e.g. "if (isinf_sign(x))" would be folded to just
11446 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11447 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11448 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11449 tree tmp = NULL_TREE;
11450
11451 arg = builtin_save_expr (arg);
11452
11453 if (signbit_fn && isinf_fn)
11454 {
11455 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11456 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11457
11458 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11459 signbit_call, integer_zero_node);
11460 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11461 isinf_call, integer_zero_node);
11462
11463 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11464 integer_minus_one_node, integer_one_node);
11465 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11466 isinf_call, tmp,
11467 integer_zero_node);
11468 }
11469
11470 return tmp;
11471 }
11472
11473 case BUILT_IN_ISFINITE:
11474 if (tree_expr_finite_p (arg))
11475 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11476 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11477 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11478 return NULL_TREE;
11479
11480 case BUILT_IN_ISNAN:
11481 if (tree_expr_nan_p (arg))
11482 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11483 if (!tree_expr_maybe_nan_p (arg))
11484 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11485
11486 {
11487 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11488 if (is_ibm_extended)
11489 {
11490 /* NaN and Inf are encoded in the high-order double value
11491 only. The low-order value is not significant. */
11492 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11493 }
11494 }
11495 arg = builtin_save_expr (arg);
11496 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11497
11498 default:
11499 gcc_unreachable ();
11500 }
11501 }
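
/* Illustrative example (not part of the original file): when the argument
   may be a NaN, the isnan case above reduces to an unordered
   self-comparison, which is true exactly for NaNs and is quiet for quiet
   NaNs.  The function name is hypothetical.  */

int
example_isnan (double x)
{
  return __builtin_isnan (x);  /* becomes the equivalent of
                                  __builtin_isunordered (x, x) */
}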
11502
11503 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11504 This builtin will generate code to return the appropriate floating
11505 point classification depending on the value of the floating point
11506 number passed in. The possible return values must be supplied as
11507 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11508    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
11509 one floating point argument which is "type generic". */
11510
11511 static tree
11512 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11513 {
11514 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11515 arg, type, res, tmp;
11516 machine_mode mode;
11517 REAL_VALUE_TYPE r;
11518 char buf[128];
11519
11520 /* Verify the required arguments in the original call. */
11521 if (nargs != 6
11522 || !validate_arg (args[0], INTEGER_TYPE)
11523 || !validate_arg (args[1], INTEGER_TYPE)
11524 || !validate_arg (args[2], INTEGER_TYPE)
11525 || !validate_arg (args[3], INTEGER_TYPE)
11526 || !validate_arg (args[4], INTEGER_TYPE)
11527 || !validate_arg (args[5], REAL_TYPE))
11528 return NULL_TREE;
11529
11530 fp_nan = args[0];
11531 fp_infinite = args[1];
11532 fp_normal = args[2];
11533 fp_subnormal = args[3];
11534 fp_zero = args[4];
11535 arg = args[5];
11536 type = TREE_TYPE (arg);
11537 mode = TYPE_MODE (type);
11538 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11539
11540 /* fpclassify(x) ->
11541 isnan(x) ? FP_NAN :
11542 (fabs(x) == Inf ? FP_INFINITE :
11543 (fabs(x) >= DBL_MIN ? FP_NORMAL :
11544 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
11545
11546 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11547 build_real (type, dconst0));
11548 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11549 tmp, fp_zero, fp_subnormal);
11550
11551 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11552 real_from_string (&r, buf);
11553 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11554 arg, build_real (type, r));
11555 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11556
11557 if (tree_expr_maybe_infinite_p (arg))
11558 {
11559 real_inf (&r);
11560 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11561 build_real (type, r));
11562 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11563 fp_infinite, res);
11564 }
11565
11566 if (tree_expr_maybe_nan_p (arg))
11567 {
11568 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11569 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11570 }
11571
11572 return res;
11573 }
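
/* Illustrative example (not part of the original file): a call with the
   argument order the folder above expects, and the nested conditional it
   expands to.  The function name is hypothetical and the constants 0..4
   merely stand in for the C library's FP_* macros.  */

int
example_fpclassify (double x)
{
  /* Expands to roughly:
       isnan (x) ? 0
       : fabs (x) == Inf ? 1
       : fabs (x) >= DBL_MIN ? 2
       : x == 0 ? 4 : 3  */
  return __builtin_fpclassify (0, 1, 2, 3, 4, x);
}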
11574
11575 /* Fold a call to an unordered comparison function such as
11576 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
11577 being called and ARG0 and ARG1 are the arguments for the call.
11578 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11579 the opposite of the desired result. UNORDERED_CODE is used
11580 for modes that can hold NaNs and ORDERED_CODE is used for
11581 the rest. */
11582
11583 static tree
11584 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11585 enum tree_code unordered_code,
11586 enum tree_code ordered_code)
11587 {
11588 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11589 enum tree_code code;
11590 tree type0, type1;
11591 enum tree_code code0, code1;
11592 tree cmp_type = NULL_TREE;
11593
11594 type0 = TREE_TYPE (arg0);
11595 type1 = TREE_TYPE (arg1);
11596
11597 code0 = TREE_CODE (type0);
11598 code1 = TREE_CODE (type1);
11599
11600 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11601 /* Choose the wider of two real types. */
11602 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11603 ? type0 : type1;
11604 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11605 cmp_type = type0;
11606 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11607 cmp_type = type1;
11608
11609 arg0 = fold_convert_loc (loc, cmp_type, arg0);
11610 arg1 = fold_convert_loc (loc, cmp_type, arg1);
11611
11612 if (unordered_code == UNORDERED_EXPR)
11613 {
11614 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11615 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11616 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11617 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11618 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11619 }
11620
11621 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11622 ? unordered_code : ordered_code;
11623 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11624 fold_build2_loc (loc, code, type, arg0, arg1));
11625 }
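
/* Illustrative example (not part of the original file): the point of the
   unordered comparison builtins handled above is to give a quiet answer
   when either operand is a NaN, unlike the plain relational operators.
   The function name is hypothetical.  */

int
example_isgreater (double x, double y)
{
  /* Folds to the negation of an unordered-or-less-equal comparison, so the
     result is 0 whenever x or y is a NaN.  */
  return __builtin_isgreater (x, y);
}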
11626
11627 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11628    arithmetic if it can never overflow, or into internal functions that
11629    return both the result of the arithmetic and an overflow boolean flag in
11630 a complex integer result, or some other check for overflow.
11631 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11632 checking part of that. */
11633
11634 static tree
11635 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11636 tree arg0, tree arg1, tree arg2)
11637 {
11638 enum internal_fn ifn = IFN_LAST;
11639 /* The code of the expression corresponding to the built-in. */
11640 enum tree_code opcode = ERROR_MARK;
11641 bool ovf_only = false;
11642
11643 switch (fcode)
11644 {
11645 case BUILT_IN_ADD_OVERFLOW_P:
11646 ovf_only = true;
11647 /* FALLTHRU */
11648 case BUILT_IN_ADD_OVERFLOW:
11649 case BUILT_IN_SADD_OVERFLOW:
11650 case BUILT_IN_SADDL_OVERFLOW:
11651 case BUILT_IN_SADDLL_OVERFLOW:
11652 case BUILT_IN_UADD_OVERFLOW:
11653 case BUILT_IN_UADDL_OVERFLOW:
11654 case BUILT_IN_UADDLL_OVERFLOW:
11655 opcode = PLUS_EXPR;
11656 ifn = IFN_ADD_OVERFLOW;
11657 break;
11658 case BUILT_IN_SUB_OVERFLOW_P:
11659 ovf_only = true;
11660 /* FALLTHRU */
11661 case BUILT_IN_SUB_OVERFLOW:
11662 case BUILT_IN_SSUB_OVERFLOW:
11663 case BUILT_IN_SSUBL_OVERFLOW:
11664 case BUILT_IN_SSUBLL_OVERFLOW:
11665 case BUILT_IN_USUB_OVERFLOW:
11666 case BUILT_IN_USUBL_OVERFLOW:
11667 case BUILT_IN_USUBLL_OVERFLOW:
11668 opcode = MINUS_EXPR;
11669 ifn = IFN_SUB_OVERFLOW;
11670 break;
11671 case BUILT_IN_MUL_OVERFLOW_P:
11672 ovf_only = true;
11673 /* FALLTHRU */
11674 case BUILT_IN_MUL_OVERFLOW:
11675 case BUILT_IN_SMUL_OVERFLOW:
11676 case BUILT_IN_SMULL_OVERFLOW:
11677 case BUILT_IN_SMULLL_OVERFLOW:
11678 case BUILT_IN_UMUL_OVERFLOW:
11679 case BUILT_IN_UMULL_OVERFLOW:
11680 case BUILT_IN_UMULLL_OVERFLOW:
11681 opcode = MULT_EXPR;
11682 ifn = IFN_MUL_OVERFLOW;
11683 break;
11684 default:
11685 gcc_unreachable ();
11686 }
11687
11688 /* For the "generic" overloads, the first two arguments can have different
11689 types and the last argument determines the target type to use to check
11690 for overflow. The arguments of the other overloads all have the same
11691 type. */
11692 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
11693
11694 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
11695 arguments are constant, attempt to fold the built-in call into a constant
11696 expression indicating whether or not it detected an overflow. */
11697 if (ovf_only
11698 && TREE_CODE (arg0) == INTEGER_CST
11699 && TREE_CODE (arg1) == INTEGER_CST)
11700 /* Perform the computation in the target type and check for overflow. */
11701 return omit_one_operand_loc (loc, boolean_type_node,
11702 arith_overflowed_p (opcode, type, arg0, arg1)
11703 ? boolean_true_node : boolean_false_node,
11704 arg2);
11705
11706 tree intres, ovfres;
11707 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
11708 {
11709 intres = fold_binary_loc (loc, opcode, type,
11710 fold_convert_loc (loc, type, arg0),
11711 fold_convert_loc (loc, type, arg1));
11712 if (TREE_OVERFLOW (intres))
11713 intres = drop_tree_overflow (intres);
11714 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
11715 ? boolean_true_node : boolean_false_node);
11716 }
11717 else
11718 {
11719 tree ctype = build_complex_type (type);
11720 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
11721 arg0, arg1);
11722 tree tgt = save_expr (call);
11723 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
11724 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
11725 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
11726 }
11727
11728 if (ovf_only)
11729 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
11730
11731 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
11732 tree store
11733 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
11734 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
11735 }
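
/* Illustrative example (not part of the original file): the two flavours
   folded above.  The _p form only reports whether the operation would
   overflow in the type of its third argument (and folds to a constant when
   both operands are constants); the plain form also stores the possibly
   wrapped result through its third argument.  The function name is
   hypothetical.  */

int
example_add_overflow (int a, int b, int *res)
{
  if (__builtin_add_overflow_p (a, b, (int) 0))  /* overflow check in type int */
    return -1;
  return __builtin_add_overflow (a, b, res);     /* nonzero on overflow */
}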
11736
11737 /* Fold a call to __builtin_FILE to a constant string. */
11738
11739 static inline tree
11740 fold_builtin_FILE (location_t loc)
11741 {
11742 if (const char *fname = LOCATION_FILE (loc))
11743 {
11744 /* The documentation says this builtin is equivalent to the preprocessor
11745 __FILE__ macro so it appears appropriate to use the same file prefix
11746 mappings. */
11747 fname = remap_macro_filename (fname);
11748 return build_string_literal (strlen (fname) + 1, fname);
11749 }
11750
11751 return build_string_literal (1, "");
11752 }
11753
11754 /* Fold a call to __builtin_FUNCTION to a constant string. */
11755
11756 static inline tree
11757 fold_builtin_FUNCTION ()
11758 {
11759 const char *name = "";
11760
11761 if (current_function_decl)
11762 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11763
11764 return build_string_literal (strlen (name) + 1, name);
11765 }
11766
11767 /* Fold a call to __builtin_LINE to an integer constant. */
11768
11769 static inline tree
11770 fold_builtin_LINE (location_t loc, tree type)
11771 {
11772 return build_int_cst (type, LOCATION_LINE (loc));
11773 }
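
/* Illustrative example (not part of the original file): the three source
   location builtins folded above.  Each folds to a constant describing the
   location of the call itself.  The function name is hypothetical.  */

const char *
example_location (int *line, const char **file)
{
  *line = __builtin_LINE ();     /* integer constant: the line of this call */
  *file = __builtin_FILE ();     /* string literal: the file name, after
                                    file-prefix remapping */
  return __builtin_FUNCTION ();  /* string literal: the enclosing function name */
}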
11774
11775 /* Fold a call to built-in function FNDECL with 0 arguments.
11776 This function returns NULL_TREE if no simplification was possible. */
11777
11778 static tree
11779 fold_builtin_0 (location_t loc, tree fndecl)
11780 {
11781 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11782 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11783 switch (fcode)
11784 {
11785 case BUILT_IN_FILE:
11786 return fold_builtin_FILE (loc);
11787
11788 case BUILT_IN_FUNCTION:
11789 return fold_builtin_FUNCTION ();
11790
11791 case BUILT_IN_LINE:
11792 return fold_builtin_LINE (loc, type);
11793
11794 CASE_FLT_FN (BUILT_IN_INF):
11795 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11796 case BUILT_IN_INFD32:
11797 case BUILT_IN_INFD64:
11798 case BUILT_IN_INFD128:
11799 return fold_builtin_inf (loc, type, true);
11800
11801 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11802 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11803 return fold_builtin_inf (loc, type, false);
11804
11805 case BUILT_IN_CLASSIFY_TYPE:
11806 return fold_builtin_classify_type (NULL_TREE);
11807
11808 default:
11809 break;
11810 }
11811 return NULL_TREE;
11812 }
11813
11814 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11815 This function returns NULL_TREE if no simplification was possible. */
11816
11817 static tree
11818 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11819 {
11820 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11821 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11822
11823 if (TREE_CODE (arg0) == ERROR_MARK)
11824 return NULL_TREE;
11825
11826 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11827 return ret;
11828
11829 switch (fcode)
11830 {
11831 case BUILT_IN_CONSTANT_P:
11832 {
11833 tree val = fold_builtin_constant_p (arg0);
11834
11835 /* Gimplification will pull the CALL_EXPR for the builtin out of
11836 an if condition. When not optimizing, we'll not CSE it back.
11837 	   To avoid regressions such as link errors, return false now.  */
11838 if (!val && !optimize)
11839 val = integer_zero_node;
11840
11841 return val;
11842 }
11843
11844 case BUILT_IN_CLASSIFY_TYPE:
11845 return fold_builtin_classify_type (arg0);
11846
11847 case BUILT_IN_STRLEN:
11848 return fold_builtin_strlen (loc, expr, type, arg0);
11849
11850 CASE_FLT_FN (BUILT_IN_FABS):
11851 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11852 case BUILT_IN_FABSD32:
11853 case BUILT_IN_FABSD64:
11854 case BUILT_IN_FABSD128:
11855 return fold_builtin_fabs (loc, arg0, type);
11856
11857 case BUILT_IN_ABS:
11858 case BUILT_IN_LABS:
11859 case BUILT_IN_LLABS:
11860 case BUILT_IN_IMAXABS:
11861 return fold_builtin_abs (loc, arg0, type);
11862
11863 CASE_FLT_FN (BUILT_IN_CONJ):
11864 if (validate_arg (arg0, COMPLEX_TYPE)
11865 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11866 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11867 break;
11868
11869 CASE_FLT_FN (BUILT_IN_CREAL):
11870 if (validate_arg (arg0, COMPLEX_TYPE)
11871 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11872 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11873 break;
11874
11875 CASE_FLT_FN (BUILT_IN_CIMAG):
11876 if (validate_arg (arg0, COMPLEX_TYPE)
11877 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11878 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11879 break;
11880
11881 CASE_FLT_FN (BUILT_IN_CARG):
11882 return fold_builtin_carg (loc, arg0, type);
11883
11884 case BUILT_IN_ISASCII:
11885 return fold_builtin_isascii (loc, arg0);
11886
11887 case BUILT_IN_TOASCII:
11888 return fold_builtin_toascii (loc, arg0);
11889
11890 case BUILT_IN_ISDIGIT:
11891 return fold_builtin_isdigit (loc, arg0);
11892
11893 CASE_FLT_FN (BUILT_IN_FINITE):
11894 case BUILT_IN_FINITED32:
11895 case BUILT_IN_FINITED64:
11896 case BUILT_IN_FINITED128:
11897 case BUILT_IN_ISFINITE:
11898 {
11899 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11900 if (ret)
11901 return ret;
11902 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11903 }
11904
11905 CASE_FLT_FN (BUILT_IN_ISINF):
11906 case BUILT_IN_ISINFD32:
11907 case BUILT_IN_ISINFD64:
11908 case BUILT_IN_ISINFD128:
11909 {
11910 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11911 if (ret)
11912 return ret;
11913 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11914 }
11915
11916 case BUILT_IN_ISNORMAL:
11917 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11918
11919 case BUILT_IN_ISINF_SIGN:
11920 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
11921
11922 CASE_FLT_FN (BUILT_IN_ISNAN):
11923 case BUILT_IN_ISNAND32:
11924 case BUILT_IN_ISNAND64:
11925 case BUILT_IN_ISNAND128:
11926 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
11927
11928 case BUILT_IN_FREE:
11929 if (integer_zerop (arg0))
11930 return build_empty_stmt (loc);
11931 break;
11932
11933 default:
11934 break;
11935 }
11936
11937 return NULL_TREE;
11938
11939 }
11940
11941 /* Folds a call EXPR (which may be null) to built-in function FNDECL
11942 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
11943 if no simplification was possible. */
11944
11945 static tree
11946 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
11947 {
11948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11949 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11950
11951 if (TREE_CODE (arg0) == ERROR_MARK
11952 || TREE_CODE (arg1) == ERROR_MARK)
11953 return NULL_TREE;
11954
11955 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
11956 return ret;
11957
11958 switch (fcode)
11959 {
11960 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
11961 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
11962 if (validate_arg (arg0, REAL_TYPE)
11963 && validate_arg (arg1, POINTER_TYPE))
11964 return do_mpfr_lgamma_r (arg0, arg1, type);
11965 break;
11966
11967 CASE_FLT_FN (BUILT_IN_FREXP):
11968 return fold_builtin_frexp (loc, arg0, arg1, type);
11969
11970 CASE_FLT_FN (BUILT_IN_MODF):
11971 return fold_builtin_modf (loc, arg0, arg1, type);
11972
11973 case BUILT_IN_STRSPN:
11974 return fold_builtin_strspn (loc, expr, arg0, arg1);
11975
11976 case BUILT_IN_STRCSPN:
11977 return fold_builtin_strcspn (loc, expr, arg0, arg1);
11978
11979 case BUILT_IN_STRPBRK:
11980 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
11981
11982 case BUILT_IN_EXPECT:
11983 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
11984
11985 case BUILT_IN_ISGREATER:
11986 return fold_builtin_unordered_cmp (loc, fndecl,
11987 arg0, arg1, UNLE_EXPR, LE_EXPR);
11988 case BUILT_IN_ISGREATEREQUAL:
11989 return fold_builtin_unordered_cmp (loc, fndecl,
11990 arg0, arg1, UNLT_EXPR, LT_EXPR);
11991 case BUILT_IN_ISLESS:
11992 return fold_builtin_unordered_cmp (loc, fndecl,
11993 arg0, arg1, UNGE_EXPR, GE_EXPR);
11994 case BUILT_IN_ISLESSEQUAL:
11995 return fold_builtin_unordered_cmp (loc, fndecl,
11996 arg0, arg1, UNGT_EXPR, GT_EXPR);
11997 case BUILT_IN_ISLESSGREATER:
11998 return fold_builtin_unordered_cmp (loc, fndecl,
11999 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
12000 case BUILT_IN_ISUNORDERED:
12001 return fold_builtin_unordered_cmp (loc, fndecl,
12002 arg0, arg1, UNORDERED_EXPR,
12003 NOP_EXPR);
12004
12005 /* We do the folding for va_start in the expander. */
12006 case BUILT_IN_VA_START:
12007 break;
12008
12009 case BUILT_IN_OBJECT_SIZE:
12010 return fold_builtin_object_size (arg0, arg1);
12011
12012 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12013 return fold_builtin_atomic_always_lock_free (arg0, arg1);
12014
12015 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12016 return fold_builtin_atomic_is_lock_free (arg0, arg1);
12017
12018 default:
12019 break;
12020 }
12021 return NULL_TREE;
12022 }
12023
12024 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12025 and ARG2.
12026 This function returns NULL_TREE if no simplification was possible. */
12027
12028 static tree
12029 fold_builtin_3 (location_t loc, tree fndecl,
12030 tree arg0, tree arg1, tree arg2)
12031 {
12032 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12033 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12034
12035 if (TREE_CODE (arg0) == ERROR_MARK
12036 || TREE_CODE (arg1) == ERROR_MARK
12037 || TREE_CODE (arg2) == ERROR_MARK)
12038 return NULL_TREE;
12039
12040 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12041 arg0, arg1, arg2))
12042 return ret;
12043
12044 switch (fcode)
12045 {
12046
12047 CASE_FLT_FN (BUILT_IN_SINCOS):
12048 return fold_builtin_sincos (loc, arg0, arg1, arg2);
12049
12050 CASE_FLT_FN (BUILT_IN_REMQUO):
12051 if (validate_arg (arg0, REAL_TYPE)
12052 && validate_arg (arg1, REAL_TYPE)
12053 && validate_arg (arg2, POINTER_TYPE))
12054 return do_mpfr_remquo (arg0, arg1, arg2);
12055 break;
12056
12057 case BUILT_IN_MEMCMP:
12058 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12059
12060 case BUILT_IN_EXPECT:
12061 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12062
12063 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12064 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12065
12066 case BUILT_IN_ADD_OVERFLOW:
12067 case BUILT_IN_SUB_OVERFLOW:
12068 case BUILT_IN_MUL_OVERFLOW:
12069 case BUILT_IN_ADD_OVERFLOW_P:
12070 case BUILT_IN_SUB_OVERFLOW_P:
12071 case BUILT_IN_MUL_OVERFLOW_P:
12072 case BUILT_IN_SADD_OVERFLOW:
12073 case BUILT_IN_SADDL_OVERFLOW:
12074 case BUILT_IN_SADDLL_OVERFLOW:
12075 case BUILT_IN_SSUB_OVERFLOW:
12076 case BUILT_IN_SSUBL_OVERFLOW:
12077 case BUILT_IN_SSUBLL_OVERFLOW:
12078 case BUILT_IN_SMUL_OVERFLOW:
12079 case BUILT_IN_SMULL_OVERFLOW:
12080 case BUILT_IN_SMULLL_OVERFLOW:
12081 case BUILT_IN_UADD_OVERFLOW:
12082 case BUILT_IN_UADDL_OVERFLOW:
12083 case BUILT_IN_UADDLL_OVERFLOW:
12084 case BUILT_IN_USUB_OVERFLOW:
12085 case BUILT_IN_USUBL_OVERFLOW:
12086 case BUILT_IN_USUBLL_OVERFLOW:
12087 case BUILT_IN_UMUL_OVERFLOW:
12088 case BUILT_IN_UMULL_OVERFLOW:
12089 case BUILT_IN_UMULLL_OVERFLOW:
12090 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12091
12092 default:
12093 break;
12094 }
12095 return NULL_TREE;
12096 }
12097
12098 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12099 ARGS is an array of NARGS arguments. IGNORE is true if the result
12100 of the function call is ignored. This function returns NULL_TREE
12101 if no simplification was possible. */
12102
12103 static tree
12104 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12105 int nargs, bool)
12106 {
12107 tree ret = NULL_TREE;
12108
12109 switch (nargs)
12110 {
12111 case 0:
12112 ret = fold_builtin_0 (loc, fndecl);
12113 break;
12114 case 1:
12115 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12116 break;
12117 case 2:
12118 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12119 break;
12120 case 3:
12121 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12122 break;
12123 default:
12124 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12125 break;
12126 }
12127 if (ret)
12128 {
12129 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12130 SET_EXPR_LOCATION (ret, loc);
12131 return ret;
12132 }
12133 return NULL_TREE;
12134 }
12135
12136 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12137 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12138 of arguments in ARGS to be omitted. OLDNARGS is the number of
12139 elements in ARGS. */
12140
12141 static tree
12142 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12143 int skip, tree fndecl, int n, va_list newargs)
12144 {
12145 int nargs = oldnargs - skip + n;
12146 tree *buffer;
12147
12148 if (n > 0)
12149 {
12150 int i, j;
12151
12152 buffer = XALLOCAVEC (tree, nargs);
12153 for (i = 0; i < n; i++)
12154 buffer[i] = va_arg (newargs, tree);
12155 for (j = skip; j < oldnargs; j++, i++)
12156 buffer[i] = args[j];
12157 }
12158 else
12159 buffer = args + skip;
12160
12161 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12162 }
12163
12164 /* Return true if FNDECL shouldn't be folded right now.
12165 If a built-in function has an inline attribute always_inline
12166 wrapper, defer folding it until after always_inline functions have
12167 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12168 might not be performed. */
12169
12170 bool
12171 avoid_folding_inline_builtin (tree fndecl)
12172 {
12173 return (DECL_DECLARED_INLINE_P (fndecl)
12174 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12175 && cfun
12176 && !cfun->always_inline_functions_inlined
12177 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12178 }
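/* Editorial illustration (not from the GCC sources): with _FORTIFY_SOURCE
   glibc wraps string functions in always_inline gnu_inline wrappers,
   roughly

     extern __inline __attribute__ ((always_inline, gnu_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
     }

   Folding the builtin before such a wrapper has been inlined would bypass
   the object-size check, hence the deferral above.  */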
12179
12180 /* A wrapper function for builtin folding that prevents warnings for
12181 "statement without effect" and the like, caused by removing the
12182 call node earlier than the warning is generated. */
12183
12184 tree
12185 fold_call_expr (location_t loc, tree exp, bool ignore)
12186 {
12187 tree ret = NULL_TREE;
12188 tree fndecl = get_callee_fndecl (exp);
12189 if (fndecl && fndecl_built_in_p (fndecl)
12190 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12191 yet. Defer folding until we see all the arguments
12192 (after inlining). */
12193 && !CALL_EXPR_VA_ARG_PACK (exp))
12194 {
12195 int nargs = call_expr_nargs (exp);
12196
12197 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12198 instead the last argument is __builtin_va_arg_pack (). Defer folding
12199 even in that case, until arguments are finalized. */
12200 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12201 {
12202 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12203 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12204 return NULL_TREE;
12205 }
12206
12207 if (avoid_folding_inline_builtin (fndecl))
12208 return NULL_TREE;
12209
12210 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12211 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12212 CALL_EXPR_ARGP (exp), ignore);
12213 else
12214 {
12215 tree *args = CALL_EXPR_ARGP (exp);
12216 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12217 if (ret)
12218 return ret;
12219 }
12220 }
12221 return NULL_TREE;
12222 }
12223
12224 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12225 N arguments are passed in the array ARGARRAY. Return a folded
12226 expression or NULL_TREE if no simplification was possible. */
12227
12228 tree
12229 fold_builtin_call_array (location_t loc, tree,
12230 tree fn,
12231 int n,
12232 tree *argarray)
12233 {
12234 if (TREE_CODE (fn) != ADDR_EXPR)
12235 return NULL_TREE;
12236
12237 tree fndecl = TREE_OPERAND (fn, 0);
12238 if (TREE_CODE (fndecl) == FUNCTION_DECL
12239 && fndecl_built_in_p (fndecl))
12240 {
12241 /* If last argument is __builtin_va_arg_pack (), arguments to this
12242 function are not finalized yet. Defer folding until they are. */
12243 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12244 {
12245 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12246 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12247 return NULL_TREE;
12248 }
12249 if (avoid_folding_inline_builtin (fndecl))
12250 return NULL_TREE;
12251 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12252 return targetm.fold_builtin (fndecl, n, argarray, false);
12253 else
12254 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12255 }
12256
12257 return NULL_TREE;
12258 }
12259
12260 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12261 along with N new arguments specified as the "..." parameters. SKIP
12262 is the number of arguments in EXP to be omitted. This function is used
12263 to do varargs-to-varargs transformations. */
12264
12265 static tree
12266 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12267 {
12268 va_list ap;
12269 tree t;
12270
12271 va_start (ap, n);
12272 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12273 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12274 va_end (ap);
12275
12276 return t;
12277 }
12278
12279 /* Validate a single argument ARG against a tree code CODE representing
12280 a type. Return true when argument is valid. */
12281
12282 static bool
12283 validate_arg (const_tree arg, enum tree_code code)
12284 {
12285 if (!arg)
12286 return false;
12287 else if (code == POINTER_TYPE)
12288 return POINTER_TYPE_P (TREE_TYPE (arg));
12289 else if (code == INTEGER_TYPE)
12290 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12291 return code == TREE_CODE (TREE_TYPE (arg));
12292 }
12293
12294 /* This function validates the types of a function call argument list
12295 against a specified list of tree_codes. If the last specifier is a 0,
12296 that represents an ellipsis, otherwise the last specifier must be a
12297 VOID_TYPE.
12298
12299 This is the GIMPLE version of validate_arglist. Eventually we want to
12300 completely convert builtins.c to work from GIMPLEs and the tree based
12301 validate_arglist will then be removed. */
12302
12303 bool
12304 validate_gimple_arglist (const gcall *call, ...)
12305 {
12306 enum tree_code code;
12307 bool res = false;
12308 va_list ap;
12309 const_tree arg;
12310 size_t i;
12311
12312 va_start (ap, call);
12313 i = 0;
12314
12315 do
12316 {
12317 code = (enum tree_code) va_arg (ap, int);
12318 switch (code)
12319 {
12320 case 0:
12321 /* This signifies an ellipsis; any further arguments are all OK. */
12322 res = true;
12323 goto end;
12324 case VOID_TYPE:
12325 /* This signifies an endlink, if no arguments remain, return
12326 true, otherwise return false. */
12327 res = (i == gimple_call_num_args (call));
12328 goto end;
12329 default:
12330 /* If no parameters remain or the parameter's code does not
12331 match the specified code, return false. Otherwise continue
12332 checking any remaining arguments. */
12333 arg = gimple_call_arg (call, i++);
12334 if (!validate_arg (arg, code))
12335 goto end;
12336 break;
12337 }
12338 }
12339 while (1);
12340
12341 /* We need gotos here since we can only have one VA_CLOSE in a
12342 function. */
12343 end: ;
12344 va_end (ap);
12345
12346 return res;
12347 }
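/* Editorial illustration (not from the GCC sources): a caller checking that
   a gimple call has a memcpy-like signature could write

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   The trailing VOID_TYPE terminates the list; a trailing 0 instead would
   accept any further arguments, as for printf-like builtins.  */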
12348
12349 /* Default target-specific builtin expander that does nothing. */
12350
12351 rtx
12352 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12353 rtx target ATTRIBUTE_UNUSED,
12354 rtx subtarget ATTRIBUTE_UNUSED,
12355 machine_mode mode ATTRIBUTE_UNUSED,
12356 int ignore ATTRIBUTE_UNUSED)
12357 {
12358 return NULL_RTX;
12359 }
12360
12361 /* Returns true if EXP represents data that would potentially reside
12362 in a readonly section. */
12363
12364 bool
12365 readonly_data_expr (tree exp)
12366 {
12367 STRIP_NOPS (exp);
12368
12369 if (TREE_CODE (exp) != ADDR_EXPR)
12370 return false;
12371
12372 exp = get_base_address (TREE_OPERAND (exp, 0));
12373 if (!exp)
12374 return false;
12375
12376 /* Make sure we call decl_readonly_section only for trees it
12377 can handle (since it returns true for everything it doesn't
12378 understand). */
12379 if (TREE_CODE (exp) == STRING_CST
12380 || TREE_CODE (exp) == CONSTRUCTOR
12381 || (VAR_P (exp) && TREE_STATIC (exp)))
12382 return decl_readonly_section (exp, 0);
12383 else
12384 return false;
12385 }
12386
12387 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12388 to the call, and TYPE is its return type.
12389
12390 Return NULL_TREE if no simplification was possible, otherwise return the
12391 simplified form of the call as a tree.
12392
12393 The simplified form may be a constant or other expression which
12394 computes the same value, but in a more efficient manner (including
12395 calls to other builtin functions).
12396
12397 The call may contain arguments which need to be evaluated, but
12398 which are not useful to determine the result of the call. In
12399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12400 COMPOUND_EXPR will be an argument which must be evaluated.
12401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12402 COMPOUND_EXPR in the chain will contain the tree for the simplified
12403 form of the builtin function call. */
12404
12405 static tree
12406 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12407 {
12408 if (!validate_arg (s1, POINTER_TYPE)
12409 || !validate_arg (s2, POINTER_TYPE))
12410 return NULL_TREE;
12411
12412 tree fn;
12413 const char *p1, *p2;
12414
12415 p2 = c_getstr (s2);
12416 if (p2 == NULL)
12417 return NULL_TREE;
12418
12419 p1 = c_getstr (s1);
12420 if (p1 != NULL)
12421 {
12422 const char *r = strpbrk (p1, p2);
12423 tree tem;
12424
12425 if (r == NULL)
12426 return build_int_cst (TREE_TYPE (s1), 0);
12427
12428 /* Return an offset into the constant string argument. */
12429 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12430 return fold_convert_loc (loc, type, tem);
12431 }
12432
12433 if (p2[0] == '\0')
12434 /* strpbrk(x, "") == NULL.
12435 Evaluate and ignore s1 in case it had side-effects. */
12436 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12437
12438 if (p2[1] != '\0')
12439 return NULL_TREE; /* Really call strpbrk. */
12440
12441 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12442 if (!fn)
12443 return NULL_TREE;
12444
12445 /* New argument list transforming strpbrk(s1, s2) to
12446 strchr(s1, s2[0]). */
12447 return build_call_expr_loc (loc, fn, 2, s1,
12448 build_int_cst (integer_type_node, p2[0]));
12449 }
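/* Editorial sketch of the strpbrk simplifications above, assuming constant
   arguments where noted:

     strpbrk ("hello", "lo")  =>  &"hello"[2]    (both strings constant)
     strpbrk (s, "")          =>  NULL           (s still evaluated)
     strpbrk (s, "x")         =>  strchr (s, 'x')

   Anything else is left as a real call to strpbrk.  */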
12450
12451 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12452 to the call.
12453
12454 Return NULL_TREE if no simplification was possible, otherwise return the
12455 simplified form of the call as a tree.
12456
12457 The simplified form may be a constant or other expression which
12458 computes the same value, but in a more efficient manner (including
12459 calls to other builtin functions).
12460
12461 The call may contain arguments which need to be evaluated, but
12462 which are not useful to determine the result of the call. In
12463 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12464 COMPOUND_EXPR will be an argument which must be evaluated.
12465 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12466 COMPOUND_EXPR in the chain will contain the tree for the simplified
12467 form of the builtin function call. */
12468
12469 static tree
12470 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12471 {
12472 if (!validate_arg (s1, POINTER_TYPE)
12473 || !validate_arg (s2, POINTER_TYPE))
12474 return NULL_TREE;
12475
12476 if (!check_nul_terminated_array (expr, s1)
12477 || !check_nul_terminated_array (expr, s2))
12478 return NULL_TREE;
12479
12480 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12481
12482 /* If either argument is "", return NULL_TREE. */
12483 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12484 /* Evaluate and ignore both arguments in case either one has
12485 side-effects. */
12486 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12487 s1, s2);
12488 return NULL_TREE;
12489 }
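/* Editorial sketch: the only simplification above is for a constant empty
   string on either side, e.g.

     n = strspn (s, "");   =>   n = 0;   (s and "" still evaluated)

   All other cases are left untouched by this routine.  */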
12490
12491 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12492 to the call.
12493
12494 Return NULL_TREE if no simplification was possible, otherwise return the
12495 simplified form of the call as a tree.
12496
12497 The simplified form may be a constant or other expression which
12498 computes the same value, but in a more efficient manner (including
12499 calls to other builtin functions).
12500
12501 The call may contain arguments which need to be evaluated, but
12502 which are not useful to determine the result of the call. In
12503 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12504 COMPOUND_EXPR will be an argument which must be evaluated.
12505 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12506 COMPOUND_EXPR in the chain will contain the tree for the simplified
12507 form of the builtin function call. */
12508
12509 static tree
12510 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12511 {
12512 if (!validate_arg (s1, POINTER_TYPE)
12513 || !validate_arg (s2, POINTER_TYPE))
12514 return NULL_TREE;
12515
12516 if (!check_nul_terminated_array (expr, s1)
12517 || !check_nul_terminated_array (expr, s2))
12518 return NULL_TREE;
12519
12520 /* If the first argument is "", return NULL_TREE. */
12521 const char *p1 = c_getstr (s1);
12522 if (p1 && *p1 == '\0')
12523 {
12524 /* Evaluate and ignore argument s2 in case it has
12525 side-effects. */
12526 return omit_one_operand_loc (loc, size_type_node,
12527 size_zero_node, s2);
12528 }
12529
12530 /* If the second argument is "", return __builtin_strlen(s1). */
12531 const char *p2 = c_getstr (s2);
12532 if (p2 && *p2 == '\0')
12533 {
12534 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12535
12536 /* If the replacement _DECL isn't initialized, don't do the
12537 transformation. */
12538 if (!fn)
12539 return NULL_TREE;
12540
12541 return build_call_expr_loc (loc, fn, 1, s1);
12542 }
12543 return NULL_TREE;
12544 }
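/* Editorial sketch of the two strcspn simplifications above:

     strcspn ("", s2)   =>   0             (s2 still evaluated)
     strcspn (s1, "")   =>   strlen (s1)

   Everything else remains a real call to strcspn.  */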
12545
12546 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12547 produced. False otherwise. This is done so that we don't output the error
12548 or warning twice or three times. */
12549
12550 bool
12551 fold_builtin_next_arg (tree exp, bool va_start_p)
12552 {
12553 tree fntype = TREE_TYPE (current_function_decl);
12554 int nargs = call_expr_nargs (exp);
12555 tree arg;
12556 /* There is a good chance the current input_location points inside the
12557 definition of the va_start macro (perhaps on the token for
12558 builtin) in a system header, so warnings will not be emitted.
12559 Use the location in real source code. */
12560 location_t current_location =
12561 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12562 NULL);
12563
12564 if (!stdarg_p (fntype))
12565 {
12566 error ("%<va_start%> used in function with fixed arguments");
12567 return true;
12568 }
12569
12570 if (va_start_p)
12571 {
12572 if (nargs != 2)
12573 {
12574 error ("wrong number of arguments to function %<va_start%>");
12575 return true;
12576 }
12577 arg = CALL_EXPR_ARG (exp, 1);
12578 }
12579 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12580 when we checked the arguments and if needed issued a warning. */
12581 else
12582 {
12583 if (nargs == 0)
12584 {
12585 /* Evidently an out of date version of <stdarg.h>; can't validate
12586 va_start's second argument, but can still work as intended. */
12587 warning_at (current_location,
12588 OPT_Wvarargs,
12589 "%<__builtin_next_arg%> called without an argument");
12590 return true;
12591 }
12592 else if (nargs > 1)
12593 {
12594 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12595 return true;
12596 }
12597 arg = CALL_EXPR_ARG (exp, 0);
12598 }
12599
12600 if (TREE_CODE (arg) == SSA_NAME)
12601 arg = SSA_NAME_VAR (arg);
12602
12603 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12604 or __builtin_next_arg (0) the first time we see it, after checking
12605 the arguments and if needed issuing a warning. */
12606 if (!integer_zerop (arg))
12607 {
12608 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12609
12610 /* Strip off all nops for the sake of the comparison. This
12611 is not quite the same as STRIP_NOPS. It does more.
12612 We must also strip off INDIRECT_EXPR for C++ reference
12613 parameters. */
12614 while (CONVERT_EXPR_P (arg)
12615 || TREE_CODE (arg) == INDIRECT_REF)
12616 arg = TREE_OPERAND (arg, 0);
12617 if (arg != last_parm)
12618 {
12619 /* FIXME: Sometimes with the tree optimizers we can get something
12620 other than the last argument even though the user did pass the
12621 last argument. We just warn and carry on, so the
12622 generated code may be wrong because
12623 of it. */
12624 warning_at (current_location,
12625 OPT_Wvarargs,
12626 "second parameter of %<va_start%> not last named argument");
12627 }
12628
12629 /* Undefined by C99 7.15.1.4p4 (va_start):
12630 "If the parameter parmN is declared with the register storage
12631 class, with a function or array type, or with a type that is
12632 not compatible with the type that results after application of
12633 the default argument promotions, the behavior is undefined."
12634 */
12635 else if (DECL_REGISTER (arg))
12636 {
12637 warning_at (current_location,
12638 OPT_Wvarargs,
12639 "undefined behavior when second parameter of "
12640 "%<va_start%> is declared with %<register%> storage");
12641 }
12642
12643 /* We want to verify the second parameter just once before the tree
12644 optimizers are run and then avoid keeping it in the tree,
12645 as otherwise we could warn even for correct code like:
12646 void foo (int i, ...)
12647 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12648 if (va_start_p)
12649 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12650 else
12651 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12652 }
12653 return false;
12654 }
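/* Editorial illustration (not from the GCC sources) of the checks above:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // -Wvarargs: not the last named parameter
       va_end (ap);
     }

   Only va_start (ap, b) is well defined here; after the checks the second
   argument is rewritten to 0 so later passes don't re-examine it.  */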
12655
12656
12657 /* Expand a call EXP to __builtin_object_size. */
12658
12659 static rtx
12660 expand_builtin_object_size (tree exp)
12661 {
12662 tree ost;
12663 int object_size_type;
12664 tree fndecl = get_callee_fndecl (exp);
12665
12666 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12667 {
12668 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
12669 exp, fndecl);
12670 expand_builtin_trap ();
12671 return const0_rtx;
12672 }
12673
12674 ost = CALL_EXPR_ARG (exp, 1);
12675 STRIP_NOPS (ost);
12676
12677 if (TREE_CODE (ost) != INTEGER_CST
12678 || tree_int_cst_sgn (ost) < 0
12679 || compare_tree_int (ost, 3) > 0)
12680 {
12681 error ("%Klast argument of %qD is not integer constant between 0 and 3",
12682 exp, fndecl);
12683 expand_builtin_trap ();
12684 return const0_rtx;
12685 }
12686
12687 object_size_type = tree_to_shwi (ost);
12688
12689 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12690 }
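/* Editorial note: by the time this expander runs, any __builtin_object_size
   call whose result could be computed has already been folded away, so what
   is expanded here is the documented "unknown" answer, e.g.

     __builtin_object_size (p, 0)   =>   (size_t) -1
     __builtin_object_size (p, 2)   =>   (size_t) 0   */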
12691
12692 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12693 FCODE is the BUILT_IN_* to use.
12694 Return NULL_RTX if we failed; the caller should emit a normal call,
12695 otherwise try to get the result in TARGET, if convenient (and in
12696 mode MODE if that's convenient). */
12697
12698 static rtx
12699 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
12700 enum built_in_function fcode)
12701 {
12702 if (!validate_arglist (exp,
12703 POINTER_TYPE,
12704 fcode == BUILT_IN_MEMSET_CHK
12705 ? INTEGER_TYPE : POINTER_TYPE,
12706 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12707 return NULL_RTX;
12708
12709 tree dest = CALL_EXPR_ARG (exp, 0);
12710 tree src = CALL_EXPR_ARG (exp, 1);
12711 tree len = CALL_EXPR_ARG (exp, 2);
12712 tree size = CALL_EXPR_ARG (exp, 3);
12713
12714 /* FIXME: Set access mode to write only for memset et al. */
12715 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12716 /*srcstr=*/NULL_TREE, size, access_read_write);
12717
12718 if (!tree_fits_uhwi_p (size))
12719 return NULL_RTX;
12720
12721 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12722 {
12723 /* Avoid transforming the checking call to an ordinary one when
12724 an overflow has been detected or when the call couldn't be
12725 validated because the size is not constant. */
12726 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12727 return NULL_RTX;
12728
12729 tree fn = NULL_TREE;
12730 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12731 mem{cpy,pcpy,move,set} is available. */
12732 switch (fcode)
12733 {
12734 case BUILT_IN_MEMCPY_CHK:
12735 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12736 break;
12737 case BUILT_IN_MEMPCPY_CHK:
12738 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12739 break;
12740 case BUILT_IN_MEMMOVE_CHK:
12741 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12742 break;
12743 case BUILT_IN_MEMSET_CHK:
12744 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12745 break;
12746 default:
12747 break;
12748 }
12749
12750 if (! fn)
12751 return NULL_RTX;
12752
12753 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12754 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12755 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12756 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12757 }
12758 else if (fcode == BUILT_IN_MEMSET_CHK)
12759 return NULL_RTX;
12760 else
12761 {
12762 unsigned int dest_align = get_pointer_alignment (dest);
12763
12764 /* If DEST is not a pointer type, call the normal function. */
12765 if (dest_align == 0)
12766 return NULL_RTX;
12767
12768 /* If SRC and DEST are the same (and not volatile), do nothing. */
12769 if (operand_equal_p (src, dest, 0))
12770 {
12771 tree expr;
12772
12773 if (fcode != BUILT_IN_MEMPCPY_CHK)
12774 {
12775 /* Evaluate and ignore LEN in case it has side-effects. */
12776 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12777 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12778 }
12779
12780 expr = fold_build_pointer_plus (dest, len);
12781 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12782 }
12783
12784 /* __memmove_chk special case. */
12785 if (fcode == BUILT_IN_MEMMOVE_CHK)
12786 {
12787 unsigned int src_align = get_pointer_alignment (src);
12788
12789 if (src_align == 0)
12790 return NULL_RTX;
12791
12792 /* If src is categorized for a readonly section we can use
12793 normal __memcpy_chk. */
12794 if (readonly_data_expr (src))
12795 {
12796 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12797 if (!fn)
12798 return NULL_RTX;
12799 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12800 dest, src, len, size);
12801 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12802 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12803 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12804 }
12805 }
12806 return NULL_RTX;
12807 }
12808 }
12809
12810 /* Emit warning if a buffer overflow is detected at compile time. */
12811
12812 static void
12813 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12814 {
12815 /* The source string. */
12816 tree srcstr = NULL_TREE;
12817 /* The size of the destination object returned by __builtin_object_size. */
12818 tree objsize = NULL_TREE;
12819 /* The string being appended to (as in __strcat_chk), or null if
12820 the call isn't a concatenation. */
12821 tree catstr = NULL_TREE;
12822 /* The maximum length of the source sequence in a bounded operation
12823 (such as __strncat_chk) or null if the operation isn't bounded
12824 (such as __strcat_chk). */
12825 tree maxread = NULL_TREE;
12826 /* The exact size of the access (such as in __strncpy_chk). */
12827 tree size = NULL_TREE;
12828 /* The access by the function that's checked. Except for snprintf
12829 both writing and reading are checked. */
12830 access_mode mode = access_read_write;
12831
12832 switch (fcode)
12833 {
12834 case BUILT_IN_STRCPY_CHK:
12835 case BUILT_IN_STPCPY_CHK:
12836 srcstr = CALL_EXPR_ARG (exp, 1);
12837 objsize = CALL_EXPR_ARG (exp, 2);
12838 break;
12839
12840 case BUILT_IN_STRCAT_CHK:
12841 /* For __strcat_chk the warning will be emitted only if overflowing
12842 by at least strlen (dest) + 1 bytes. */
12843 catstr = CALL_EXPR_ARG (exp, 0);
12844 srcstr = CALL_EXPR_ARG (exp, 1);
12845 objsize = CALL_EXPR_ARG (exp, 2);
12846 break;
12847
12848 case BUILT_IN_STRNCAT_CHK:
12849 catstr = CALL_EXPR_ARG (exp, 0);
12850 srcstr = CALL_EXPR_ARG (exp, 1);
12851 maxread = CALL_EXPR_ARG (exp, 2);
12852 objsize = CALL_EXPR_ARG (exp, 3);
12853 break;
12854
12855 case BUILT_IN_STRNCPY_CHK:
12856 case BUILT_IN_STPNCPY_CHK:
12857 srcstr = CALL_EXPR_ARG (exp, 1);
12858 size = CALL_EXPR_ARG (exp, 2);
12859 objsize = CALL_EXPR_ARG (exp, 3);
12860 break;
12861
12862 case BUILT_IN_SNPRINTF_CHK:
12863 case BUILT_IN_VSNPRINTF_CHK:
12864 maxread = CALL_EXPR_ARG (exp, 1);
12865 objsize = CALL_EXPR_ARG (exp, 3);
12866 /* The only checked access is the write to the destination. */
12867 mode = access_write_only;
12868 break;
12869 default:
12870 gcc_unreachable ();
12871 }
12872
12873 if (catstr && maxread)
12874 {
12875 /* Check __strncat_chk. There is no way to determine the length
12876 of the string to which the source string is being appended so
12877 just warn when the length of the source string is not known. */
12878 check_strncat_sizes (exp, objsize);
12879 return;
12880 }
12881
12882 check_access (exp, size, maxread, srcstr, objsize, mode);
12883 }
12884
12885 /* Emit warning if a buffer overflow is detected at compile time
12886 in __sprintf_chk/__vsprintf_chk calls. */
12887
12888 static void
12889 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12890 {
12891 tree size, len, fmt;
12892 const char *fmt_str;
12893 int nargs = call_expr_nargs (exp);
12894
12895 /* Verify the required arguments in the original call. */
12896
12897 if (nargs < 4)
12898 return;
12899 size = CALL_EXPR_ARG (exp, 2);
12900 fmt = CALL_EXPR_ARG (exp, 3);
12901
12902 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12903 return;
12904
12905 /* Check whether the format is a literal string constant. */
12906 fmt_str = c_getstr (fmt);
12907 if (fmt_str == NULL)
12908 return;
12909
12910 if (!init_target_chars ())
12911 return;
12912
12913 /* If the format doesn't contain % args or %%, we know its size. */
12914 if (strchr (fmt_str, target_percent) == 0)
12915 len = build_int_cstu (size_type_node, strlen (fmt_str));
12916 /* If the format is "%s" and first ... argument is a string literal,
12917 we know it too. */
12918 else if (fcode == BUILT_IN_SPRINTF_CHK
12919 && strcmp (fmt_str, target_percent_s) == 0)
12920 {
12921 tree arg;
12922
12923 if (nargs < 5)
12924 return;
12925 arg = CALL_EXPR_ARG (exp, 4);
12926 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12927 return;
12928
12929 len = c_strlen (arg, 1);
12930 if (!len || ! tree_fits_uhwi_p (len))
12931 return;
12932 }
12933 else
12934 return;
12935
12936 /* Add one for the terminating nul. */
12937 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
12938
12939 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12940 access_write_only);
12941 }
12942
12943 /* Return true if FNDECL is an allocation function. Unless
12944 ALL_ALLOC is set, consider only functions that return dynamically
12945 allocated objects. Otherwise return true even for all forms of
12946 alloca (including VLA). */
12947
12948 static bool
12949 fndecl_alloc_p (tree fndecl, bool all_alloc)
12950 {
12951 if (!fndecl)
12952 return false;
12953
12954 /* A call to operator new isn't recognized as one to a built-in. */
12955 if (DECL_IS_OPERATOR_NEW_P (fndecl))
12956 return true;
12957
12958 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12959 {
12960 switch (DECL_FUNCTION_CODE (fndecl))
12961 {
12962 case BUILT_IN_ALLOCA:
12963 case BUILT_IN_ALLOCA_WITH_ALIGN:
12964 return all_alloc;
12965 case BUILT_IN_ALIGNED_ALLOC:
12966 case BUILT_IN_CALLOC:
12967 case BUILT_IN_GOMP_ALLOC:
12968 case BUILT_IN_MALLOC:
12969 case BUILT_IN_REALLOC:
12970 case BUILT_IN_STRDUP:
12971 case BUILT_IN_STRNDUP:
12972 return true;
12973 default:
12974 break;
12975 }
12976 }
12977
12978 /* A function is considered an allocation function if it's declared
12979 with attribute malloc with an argument naming its associated
12980 deallocation function. */
12981 tree attrs = DECL_ATTRIBUTES (fndecl);
12982 if (!attrs)
12983 return false;
12984
12985 for (tree allocs = attrs;
12986 (allocs = lookup_attribute ("malloc", allocs));
12987 allocs = TREE_CHAIN (allocs))
12988 {
12989 tree args = TREE_VALUE (allocs);
12990 if (!args)
12991 continue;
12992
12993 if (TREE_VALUE (args))
12994 return true;
12995 }
12996
12997 return false;
12998 }
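/* Editorial illustration (hypothetical declarations, not from the GCC
   sources) of an allocator recognized by the attribute check above:

     void  my_free (void *);
     void *my_alloc (size_t)
          __attribute__ ((malloc, malloc (my_free, 1)));

   The second malloc attribute names an associated deallocation function,
   which is the form fndecl_alloc_p looks for; a bare attribute malloc with
   no arguments is not enough.  */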
12999
13000 /* Return true if STMT is a call to an allocation function. A wrapper
13001 around fndecl_alloc_p. */
13002
13003 static bool
13004 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13005 {
13006 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13007 }
13008
13009 /* Return the zero-based number corresponding to the argument being
13010 deallocated if EXP is a call to a deallocation function or UINT_MAX
13011 if it isn't. */
13012
13013 static unsigned
13014 call_dealloc_argno (tree exp)
13015 {
13016 tree fndecl = get_callee_fndecl (exp);
13017 if (!fndecl)
13018 return UINT_MAX;
13019
13020 return fndecl_dealloc_argno (fndecl);
13021 }
13022
13023 /* Return the zero-based number corresponding to the argument being
13024 deallocated if FNDECL is a deallocation function or UINT_MAX
13025 if it isn't. */
13026
13027 unsigned
13028 fndecl_dealloc_argno (tree fndecl)
13029 {
13030 /* A call to operator delete isn't recognized as one to a built-in. */
13031 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13032 return 0;
13033
13034 /* TODO: Handle user-defined functions with attribute malloc? Handle
13035 known non-built-ins like fopen? */
13036 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13037 {
13038 switch (DECL_FUNCTION_CODE (fndecl))
13039 {
13040 case BUILT_IN_FREE:
13041 case BUILT_IN_REALLOC:
13042 return 0;
13043 default:
13044 break;
13045 }
13046 return UINT_MAX;
13047 }
13048
13049 tree attrs = DECL_ATTRIBUTES (fndecl);
13050 if (!attrs)
13051 return UINT_MAX;
13052
13053 for (tree atfree = attrs;
13054 (atfree = lookup_attribute ("*dealloc", atfree));
13055 atfree = TREE_CHAIN (atfree))
13056 {
13057 tree alloc = TREE_VALUE (atfree);
13058 if (!alloc)
13059 continue;
13060
13061 tree pos = TREE_CHAIN (alloc);
13062 if (!pos)
13063 return 0;
13064
13065 pos = TREE_VALUE (pos);
13066 return TREE_INT_CST_LOW (pos) - 1;
13067 }
13068
13069 return UINT_MAX;
13070 }
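/* Editorial continuation of the hypothetical example above: for

     void my_free2 (int flags, void *);
     void *my_alloc2 (size_t)
          __attribute__ ((malloc (my_free2, 2)));

   the internal "*dealloc" attribute attached to my_free2 records argument
   position 2, so fndecl_dealloc_argno (my_free2) should return 1 (zero
   based).  */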
13071
13072 /* Return true if DELC doesn't refer to an operator delete that's
13073 suitable to call with a pointer returned from the operator new
13074 described by NEWC. */
13075
13076 static bool
13077 new_delete_mismatch_p (const demangle_component &newc,
13078 const demangle_component &delc)
13079 {
13080 if (newc.type != delc.type)
13081 return true;
13082
13083 switch (newc.type)
13084 {
13085 case DEMANGLE_COMPONENT_NAME:
13086 {
13087 int len = newc.u.s_name.len;
13088 const char *news = newc.u.s_name.s;
13089 const char *dels = delc.u.s_name.s;
13090 if (len != delc.u.s_name.len || memcmp (news, dels, len))
13091 return true;
13092
13093 if (news[len] == 'n')
13094 {
13095 if (news[len + 1] == 'a')
13096 return dels[len] != 'd' || dels[len + 1] != 'a';
13097 if (news[len + 1] == 'w')
13098 return dels[len] != 'd' || dels[len + 1] != 'l';
13099 }
13100 return false;
13101 }
13102
13103 case DEMANGLE_COMPONENT_OPERATOR:
13104 /* Operator mismatches are handled above. */
13105 return false;
13106
13107 case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13108 if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13109 return true;
13110 return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13111 *delc.u.s_extended_operator.name);
13112
13113 case DEMANGLE_COMPONENT_FIXED_TYPE:
13114 if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13115 || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13116 return true;
13117 return new_delete_mismatch_p (*newc.u.s_fixed.length,
13118 *delc.u.s_fixed.length);
13119
13120 case DEMANGLE_COMPONENT_CTOR:
13121 if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13122 return true;
13123 return new_delete_mismatch_p (*newc.u.s_ctor.name,
13124 *delc.u.s_ctor.name);
13125
13126 case DEMANGLE_COMPONENT_DTOR:
13127 if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13128 return true;
13129 return new_delete_mismatch_p (*newc.u.s_dtor.name,
13130 *delc.u.s_dtor.name);
13131
13132 case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13133 {
13134 /* The demangler API provides no better way to compare built-in
13135 types except by comparing their demangled names. */
13136 size_t nsz, dsz;
13137 demangle_component *pnc = const_cast<demangle_component *>(&newc);
13138 demangle_component *pdc = const_cast<demangle_component *>(&delc);
13139 char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13140 char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13141 if (!nts != !dts)
13142 return true;
13143 bool mismatch = strcmp (nts, dts);
13144 free (nts);
13145 free (dts);
13146 return mismatch;
13147 }
13148
13149 case DEMANGLE_COMPONENT_SUB_STD:
13150 if (newc.u.s_string.len != delc.u.s_string.len)
13151 return true;
13152 return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13153 newc.u.s_string.len);
13154
13155 case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13156 case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13157 return newc.u.s_number.number != delc.u.s_number.number;
13158
13159 case DEMANGLE_COMPONENT_CHARACTER:
13160 return newc.u.s_character.character != delc.u.s_character.character;
13161
13162 case DEMANGLE_COMPONENT_DEFAULT_ARG:
13163 case DEMANGLE_COMPONENT_LAMBDA:
13164 if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13165 return true;
13166 return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13167 *delc.u.s_unary_num.sub);
13168 default:
13169 break;
13170 }
13171
13172 if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13173 return true;
13174
13175 if (!newc.u.s_binary.left)
13176 return false;
13177
13178 if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13179 || !newc.u.s_binary.right != !delc.u.s_binary.right)
13180 return true;
13181
13182 if (newc.u.s_binary.right)
13183 return new_delete_mismatch_p (*newc.u.s_binary.right,
13184 *delc.u.s_binary.right);
13185 return false;
13186 }
13187
13188 /* Return true if DELETE_DECL is an operator delete that's not suitable
13189 to call with a pointer returned from NEW_DECL. */
13190
13191 static bool
13192 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13193 {
13194 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13195 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13196
13197 /* valid_new_delete_pair_p() returns a conservative result (currently
13198 it only handles global operators). A true result is reliable but
13199 a false result doesn't necessarily mean the operators don't match. */
13200 if (valid_new_delete_pair_p (new_name, delete_name))
13201 return false;
13202
13203 /* For anything not handled by valid_new_delete_pair_p(), such as member
13204 operators, compare the individual demangled components of the mangled
13205 name. */
13206 const char *new_str = IDENTIFIER_POINTER (new_name);
13207 const char *del_str = IDENTIFIER_POINTER (delete_name);
13208
13209 void *np = NULL, *dp = NULL;
13210 demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13211 demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13212 bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13213 free (np);
13214 free (dp);
13215 return mismatch;
13216 }
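/* Editorial illustration of the mismatches detected above:

     T *p = new T[4];
     delete p;       // array new paired with scalar delete

   is flagged by -Wmismatched-new-delete, whereas new T / delete p and
   new T[4] / delete[] p pair up correctly.  */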
13217
13218 /* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation
13219 functions. Return true if the latter is suitable to deallocate objects
13220 allocated by calls to the former. */
13221
13222 static bool
13223 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13224 {
13225 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13226 a built-in deallocator. */
13227 enum class alloc_kind_t { none, builtin, user }
13228 alloc_dealloc_kind = alloc_kind_t::none;
13229
13230 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13231 {
13232 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13233 /* Return true iff both functions are of the same array or
13234 singleton form and false otherwise. */
13235 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13236
13237 /* Return false for deallocation functions that are known not
13238 to match. */
13239 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13240 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13241 return false;
13242 /* Otherwise proceed below to check the deallocation function's
13243 "*dealloc" attributes to look for one that mentions this operator
13244 new. */
13245 }
13246 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13247 {
13248 switch (DECL_FUNCTION_CODE (alloc_decl))
13249 {
13250 case BUILT_IN_ALLOCA:
13251 case BUILT_IN_ALLOCA_WITH_ALIGN:
13252 return false;
13253
13254 case BUILT_IN_ALIGNED_ALLOC:
13255 case BUILT_IN_CALLOC:
13256 case BUILT_IN_GOMP_ALLOC:
13257 case BUILT_IN_MALLOC:
13258 case BUILT_IN_REALLOC:
13259 case BUILT_IN_STRDUP:
13260 case BUILT_IN_STRNDUP:
13261 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13262 return false;
13263
13264 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13265 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13266 return true;
13267
13268 alloc_dealloc_kind = alloc_kind_t::builtin;
13269 break;
13270
13271 default:
13272 break;
13273 }
13274 }
13275
13276 /* Set if DEALLOC_DECL both allocates and deallocates. */
13277 alloc_kind_t realloc_kind = alloc_kind_t::none;
13278
13279 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13280 {
13281 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13282 if (dealloc_code == BUILT_IN_REALLOC)
13283 realloc_kind = alloc_kind_t::builtin;
13284
13285 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13286 (amats = lookup_attribute ("malloc", amats));
13287 amats = TREE_CHAIN (amats))
13288 {
13289 tree args = TREE_VALUE (amats);
13290 if (!args)
13291 continue;
13292
13293 tree fndecl = TREE_VALUE (args);
13294 if (!fndecl || !DECL_P (fndecl))
13295 continue;
13296
13297 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13298 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13299 return true;
13300 }
13301 }
13302
13303 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13304 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13305
13306 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13307 of its associated allocation functions for ALLOC_DECL.
13308 If the corresponding ALLOC_DECL is found they're a matching pair,
13309 otherwise they're not.
13310 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13311 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13312 (ddats = lookup_attribute ("*dealloc", ddats));
13313 ddats = TREE_CHAIN (ddats))
13314 {
13315 tree args = TREE_VALUE (ddats);
13316 if (!args)
13317 continue;
13318
13319 tree alloc = TREE_VALUE (args);
13320 if (!alloc)
13321 continue;
13322
13323 if (alloc == DECL_NAME (dealloc_decl))
13324 realloc_kind = alloc_kind_t::user;
13325
13326 if (DECL_P (alloc))
13327 {
13328 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13329
13330 switch (DECL_FUNCTION_CODE (alloc))
13331 {
13332 case BUILT_IN_ALIGNED_ALLOC:
13333 case BUILT_IN_CALLOC:
13334 case BUILT_IN_GOMP_ALLOC:
13335 case BUILT_IN_MALLOC:
13336 case BUILT_IN_REALLOC:
13337 case BUILT_IN_STRDUP:
13338 case BUILT_IN_STRNDUP:
13339 realloc_dealloc_kind = alloc_kind_t::builtin;
13340 break;
13341 default:
13342 break;
13343 }
13344
13345 if (!alloc_builtin)
13346 continue;
13347
13348 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13349 continue;
13350
13351 return true;
13352 }
13353
13354 if (alloc == DECL_NAME (alloc_decl))
13355 return true;
13356 }
13357
13358 if (realloc_kind == alloc_kind_t::none)
13359 return false;
13360
13361 hash_set<tree> common_deallocs;
13362 /* Special handling for deallocators. Iterate over both the allocator's
13363 and the reallocator's associated deallocator functions looking for
13364 the first one in common. If one is found, the de/reallocator is
13365 a match for the allocator even though the latter isn't directly
13366 associated with the former. This simplifies declarations in system
13367 headers.
13368 With AMATS set to the Allocator's Malloc ATtributes,
13369 and RMATS set to Reallocator's Malloc ATtributes... */
13370 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13371 rmats = DECL_ATTRIBUTES (dealloc_decl);
13372 (amats = lookup_attribute ("malloc", amats))
13373 || (rmats = lookup_attribute ("malloc", rmats));
13374 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13375 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13376 {
13377 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13378 if (tree adealloc = TREE_VALUE (args))
13379 {
13380 if (DECL_P (adealloc)
13381 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13382 {
13383 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13384 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13385 {
13386 if (realloc_kind == alloc_kind_t::builtin)
13387 return true;
13388 alloc_dealloc_kind = alloc_kind_t::builtin;
13389 }
13390 continue;
13391 }
13392
13393 common_deallocs.add (adealloc);
13394 }
13395
13396 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13397 if (tree ddealloc = TREE_VALUE (args))
13398 {
13399 if (DECL_P (ddealloc)
13400 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13401 {
13402 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13403 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13404 {
13405 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13406 return true;
13407 realloc_dealloc_kind = alloc_kind_t::builtin;
13408 }
13409 continue;
13410 }
13411
13412 if (common_deallocs.add (ddealloc))
13413 return true;
13414 }
13415 }
13416
13417 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13418 a built-in deallocator. */
13419 return (alloc_dealloc_kind == alloc_kind_t::builtin
13420 && realloc_dealloc_kind == alloc_kind_t::builtin);
13421 }
13422
13423 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13424 objects allocated by the ALLOC call. */
13425
13426 static bool
13427 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13428 {
13429 tree alloc_decl = gimple_call_fndecl (alloc);
13430 if (!alloc_decl)
13431 return true;
13432
13433 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13434 }
13435
13436 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13437 includes a nonzero offset. Such a pointer cannot refer to the beginning
13438 of an allocated object. A negative offset may refer to it only if
13439 the target pointer is unknown. */
13440
13441 static bool
13442 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13443 {
13444 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13445 return false;
13446
13447 tree dealloc_decl = get_callee_fndecl (exp);
13448 if (!dealloc_decl)
13449 return false;
13450
13451 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13452 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13453 {
13454 /* A call to a user-defined operator delete with a pointer plus offset
13455 may be valid if it's returned from an unknown function (i.e., one
13456 that's not operator new). */
13457 if (TREE_CODE (aref.ref) == SSA_NAME)
13458 {
13459 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13460 if (is_gimple_call (def_stmt))
13461 {
13462 tree alloc_decl = gimple_call_fndecl (def_stmt);
13463 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13464 return false;
13465 }
13466 }
13467 }
13468
13469 char offstr[80];
13470 offstr[0] = '\0';
13471 if (wi::fits_shwi_p (aref.offrng[0]))
13472 {
13473 if (aref.offrng[0] == aref.offrng[1]
13474 || !wi::fits_shwi_p (aref.offrng[1]))
13475 sprintf (offstr, " %lli",
13476 (long long)aref.offrng[0].to_shwi ());
13477 else
13478 sprintf (offstr, " [%lli, %lli]",
13479 (long long)aref.offrng[0].to_shwi (),
13480 (long long)aref.offrng[1].to_shwi ());
13481 }
13482
13483 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13484 "%K%qD called on pointer %qE with nonzero offset%s",
13485 exp, dealloc_decl, aref.ref, offstr))
13486 return false;
13487
13488 if (DECL_P (aref.ref))
13489 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13490 else if (TREE_CODE (aref.ref) == SSA_NAME)
13491 {
13492 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13493 if (is_gimple_call (def_stmt))
13494 {
13495 location_t def_loc = gimple_location (def_stmt);
13496 tree alloc_decl = gimple_call_fndecl (def_stmt);
13497 if (alloc_decl)
13498 inform (def_loc,
13499 "returned from %qD", alloc_decl);
13500 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13501 inform (def_loc,
13502 "returned from %qT", alloc_fntype);
13503 else
13504 inform (def_loc, "obtained here");
13505 }
13506 }
13507
13508 return true;
13509 }
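/* Editorial illustration of the offset diagnostic above:

     char *p = (char *) malloc (n);
     free (p + 1);   // -Wfree-nonheap-object: pointer with nonzero offset 1

   A pointer past the start of an allocated object can never be a value
   returned by the allocator, so the call is diagnosed.  */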
13510
13511 /* Issue a warning if a deallocation function such as free, realloc,
13512 or C++ operator delete is called with an argument not returned by
13513 a matching allocation function such as malloc or the corresponding
13514 form of C++ operator new. */
13515
13516 void
13517 maybe_emit_free_warning (tree exp)
13518 {
13519 tree fndecl = get_callee_fndecl (exp);
13520 if (!fndecl)
13521 return;
13522
13523 unsigned argno = call_dealloc_argno (exp);
13524 if ((unsigned) call_expr_nargs (exp) <= argno)
13525 return;
13526
13527 tree ptr = CALL_EXPR_ARG (exp, argno);
13528 if (integer_zerop (ptr))
13529 return;
13530
13531 access_ref aref;
13532 if (!compute_objsize (ptr, 0, &aref))
13533 return;
13534
13535 tree ref = aref.ref;
13536 if (integer_zerop (ref))
13537 return;
13538
13539 tree dealloc_decl = get_callee_fndecl (exp);
13540 location_t loc = tree_inlined_location (exp);
13541
13542 if (DECL_P (ref) || EXPR_P (ref))
13543 {
13544 /* Diagnose freeing a declared object. */
13545 if (aref.ref_declared ()
13546 && warning_at (loc, OPT_Wfree_nonheap_object,
13547 "%K%qD called on unallocated object %qD",
13548 exp, dealloc_decl, ref))
13549 {
13550 loc = (DECL_P (ref)
13551 ? DECL_SOURCE_LOCATION (ref)
13552 : EXPR_LOCATION (ref));
13553 inform (loc, "declared here");
13554 return;
13555 }
13556
13557 /* Diagnose freeing a pointer that includes a positive offset.
13558 Such a pointer cannot refer to the beginning of an allocated
13559 object. A negative offset may refer to it. */
13560 if (aref.sizrng[0] != aref.sizrng[1]
13561 && warn_dealloc_offset (loc, exp, aref))
13562 return;
13563 }
13564 else if (CONSTANT_CLASS_P (ref))
13565 {
13566 if (warning_at (loc, OPT_Wfree_nonheap_object,
13567 "%K%qD called on a pointer to an unallocated "
13568 "object %qE", exp, dealloc_decl, ref))
13569 {
13570 if (TREE_CODE (ptr) == SSA_NAME)
13571 {
13572 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13573 if (is_gimple_assign (def_stmt))
13574 {
13575 location_t loc = gimple_location (def_stmt);
13576 inform (loc, "assigned here");
13577 }
13578 }
13579 return;
13580 }
13581 }
13582 else if (TREE_CODE (ref) == SSA_NAME)
13583 {
13584 /* Also warn if the pointer argument refers to the result
13585 of an allocation call like alloca or VLA. */
13586 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13587 if (is_gimple_call (def_stmt))
13588 {
13589 bool warned = false;
13590 if (gimple_call_alloc_p (def_stmt))
13591 {
13592 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13593 {
13594 if (warn_dealloc_offset (loc, exp, aref))
13595 return;
13596 }
13597 else
13598 {
13599 tree alloc_decl = gimple_call_fndecl (def_stmt);
13600 int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13601 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13602 ? OPT_Wmismatched_new_delete
13603 : OPT_Wmismatched_dealloc);
13604 warned = warning_at (loc, opt,
13605 "%K%qD called on pointer returned "
13606 "from a mismatched allocation "
13607 "function", exp, dealloc_decl);
13608 }
13609 }
13610 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13611 || gimple_call_builtin_p (def_stmt,
13612 BUILT_IN_ALLOCA_WITH_ALIGN))
13613 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13614 "%K%qD called on pointer to "
13615 "an unallocated object",
13616 exp, dealloc_decl);
13617 else if (warn_dealloc_offset (loc, exp, aref))
13618 return;
13619
13620 if (warned)
13621 {
13622 tree fndecl = gimple_call_fndecl (def_stmt);
13623 inform (gimple_location (def_stmt),
13624 "returned from %qD", fndecl);
13625 return;
13626 }
13627 }
13628 else if (gimple_nop_p (def_stmt))
13629 {
13630 ref = SSA_NAME_VAR (ref);
13631 /* Diagnose freeing a pointer that includes a positive offset. */
13632 if (TREE_CODE (ref) == PARM_DECL
13633 && !aref.deref
13634 && aref.sizrng[0] != aref.sizrng[1]
13635 && aref.offrng[0] > 0 && aref.offrng[1] > 0
13636 && warn_dealloc_offset (loc, exp, aref))
13637 return;
13638 }
13639 }
13640 }
13641
13642 /* Fold a call to __builtin_object_size with arguments PTR and OST,
13643 if possible. */
13644
13645 static tree
13646 fold_builtin_object_size (tree ptr, tree ost)
13647 {
13648 unsigned HOST_WIDE_INT bytes;
13649 int object_size_type;
13650
13651 if (!validate_arg (ptr, POINTER_TYPE)
13652 || !validate_arg (ost, INTEGER_TYPE))
13653 return NULL_TREE;
13654
13655 STRIP_NOPS (ost);
13656
13657 if (TREE_CODE (ost) != INTEGER_CST
13658 || tree_int_cst_sgn (ost) < 0
13659 || compare_tree_int (ost, 3) > 0)
13660 return NULL_TREE;
13661
13662 object_size_type = tree_to_shwi (ost);
13663
13664 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
13665 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
13666 and (size_t) 0 for types 2 and 3. */
13667 if (TREE_SIDE_EFFECTS (ptr))
13668 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
13669
13670 if (TREE_CODE (ptr) == ADDR_EXPR)
13671 {
13672 compute_builtin_object_size (ptr, object_size_type, &bytes);
13673 if (wi::fits_to_tree_p (bytes, size_type_node))
13674 return build_int_cstu (size_type_node, bytes);
13675 }
13676 else if (TREE_CODE (ptr) == SSA_NAME)
13677 {
13678 /* If object size is not known yet, delay folding until
13679 later. Maybe subsequent passes will help determining
13680 it. */
13681 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
13682 && wi::fits_to_tree_p (bytes, size_type_node))
13683 return build_int_cstu (size_type_node, bytes);
13684 }
13685
13686 return NULL_TREE;
13687 }
13688
13689 /* Builtins with folding operations that operate on "..." arguments
13690 need special handling; we need to store the arguments in a convenient
13691 data structure before attempting any folding. Fortunately there are
13692 only a few builtins that fall into this category. FNDECL is the
13693 function and ARGS is the array of its NARGS arguments. */
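/* As an illustration (assuming the usual argument order used by the
   <math.h> fpclassify macro): a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)

   is expected to fold to its FP_NORMAL argument.  */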
13694
13695 static tree
13696 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
13697 {
13698 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13699 tree ret = NULL_TREE;
13700
13701 switch (fcode)
13702 {
13703 case BUILT_IN_FPCLASSIFY:
13704 ret = fold_builtin_fpclassify (loc, args, nargs);
13705 break;
13706
13707 default:
13708 break;
13709 }
13710 if (ret)
13711 {
13712 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13713 SET_EXPR_LOCATION (ret, loc);
13714 TREE_NO_WARNING (ret) = 1;
13715 return ret;
13716 }
13717 return NULL_TREE;
13718 }
13719
13720 /* Initialize format string characters in the target charset. */
13721
13722 bool
13723 init_target_chars (void)
13724 {
13725 static bool init;
13726 if (!init)
13727 {
13728 target_newline = lang_hooks.to_target_charset ('\n');
13729 target_percent = lang_hooks.to_target_charset ('%');
13730 target_c = lang_hooks.to_target_charset ('c');
13731 target_s = lang_hooks.to_target_charset ('s');
13732 if (target_newline == 0 || target_percent == 0 || target_c == 0
13733 || target_s == 0)
13734 return false;
13735
13736 target_percent_c[0] = target_percent;
13737 target_percent_c[1] = target_c;
13738 target_percent_c[2] = '\0';
13739
13740 target_percent_s[0] = target_percent;
13741 target_percent_s[1] = target_s;
13742 target_percent_s[2] = '\0';
13743
13744 target_percent_s_newline[0] = target_percent;
13745 target_percent_s_newline[1] = target_s;
13746 target_percent_s_newline[2] = target_newline;
13747 target_percent_s_newline[3] = '\0';
13748
13749 init = true;
13750 }
13751 return true;
13752 }
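/* An illustrative sketch of how callers typically use the above (see
   the printf/fprintf folders in gimple-fold.c):

     if (!init_target_chars ())
       return false;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ...  handle a lone "%s" directive  ...

   so that format directives are matched in the target character set.  */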
13753
13754 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13755 and no overflow/underflow occurred. INEXACT is true if M was not
13756 exactly calculated. TYPE is the tree type for the result. This
13757 function assumes that the MPFR flags were cleared before M was
13758 calculated, so that any flag raised by that calculation is still
13759 set on entry. Return NULL_TREE if any checks fail. */
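/* A sketch of the expected calling pattern (mirroring the callers
   further below; the choice of mpfr_sin is only illustrative):

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);  */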
13760
13761 static tree
13762 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13763 {
13764 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13765 overflow/underflow occurred. If -frounding-math, proceed iff the
13766 MPFR computation that produced M was exact. */
13767 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13768 && (!flag_rounding_math || !inexact))
13769 {
13770 REAL_VALUE_TYPE rr;
13771
13772 real_from_mpfr (&rr, m, type, MPFR_RNDN);
13773 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value, and
13774 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13775 but the mpfr_t is not, then we underflowed in the
13776 conversion. */
13777 if (real_isfinite (&rr)
13778 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13779 {
13780 REAL_VALUE_TYPE rmode;
13781
13782 real_convert (&rmode, TYPE_MODE (type), &rr);
13783 /* Proceed iff the specified mode can hold the value. */
13784 if (real_identical (&rmode, &rr))
13785 return build_real (type, rmode);
13786 }
13787 }
13788 return NULL_TREE;
13789 }
13790
13791 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13792 number and no overflow/underflow occurred. INEXACT is true if M
13793 was not exactly calculated. TYPE is the tree type for the result.
13794 This function assumes that the MPFR flags were cleared before M was
13795 calculated, so that any flag raised by that calculation is still
13796 set on entry. Return NULL_TREE if any checks fail; if
13797 FORCE_CONVERT is true, bypass the checks. */
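/* A sketch of the expected calling pattern (see do_mpc_arg2 below;
   the choice of mpc_pow is only illustrative):

     mpfr_clear_flags ();
     inexact = mpc_pow (m0, m0, m1, crnd);
     result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);  */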
13798
13799 static tree
13800 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13801 {
13802 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13803 overflow/underflow occurred. If -frounding-math, proceed iff the
13804 MPC computation that produced M was exact. */
13805 if (force_convert
13806 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13807 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13808 && (!flag_rounding_math || !inexact)))
13809 {
13810 REAL_VALUE_TYPE re, im;
13811
13812 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
13813 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
13814 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values, and
13815 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13816 but the mpfr_t is not, then we underflowed in the
13817 conversion. */
13818 if (force_convert
13819 || (real_isfinite (&re) && real_isfinite (&im)
13820 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13821 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13822 {
13823 REAL_VALUE_TYPE re_mode, im_mode;
13824
13825 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13826 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13827 /* Proceed iff the specified mode can hold the value. */
13828 if (force_convert
13829 || (real_identical (&re_mode, &re)
13830 && real_identical (&im_mode, &im)))
13831 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13832 build_real (TREE_TYPE (type), im_mode));
13833 }
13834 }
13835 return NULL_TREE;
13836 }
13837
13838 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13839 the integer pointed to by ARG_QUO and return the remainder. The type is taken
13840 from the type of ARG0 and is used for setting the precision of the
13841 calculation and results. */
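/* Worked example (for illustration): remquo (29.0, 3.0, &quo) yields
   the remainder -1.0, because 29/3 rounded to the nearest integer is
   10 and 29 - 10*3 == -1; QUO receives a value whose sign and
   low-order bits agree with 10.  */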
13842
13843 static tree
13844 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13845 {
13846 tree const type = TREE_TYPE (arg0);
13847 tree result = NULL_TREE;
13848
13849 STRIP_NOPS (arg0);
13850 STRIP_NOPS (arg1);
13851
13852 /* To proceed, MPFR must exactly represent the target floating point
13853 format, which only happens when the target base equals two. */
13854 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13855 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13856 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13857 {
13858 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13859 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13860
13861 if (real_isfinite (ra0) && real_isfinite (ra1))
13862 {
13863 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13864 const int prec = fmt->p;
13865 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
13866 tree result_rem;
13867 long integer_quo;
13868 mpfr_t m0, m1;
13869
13870 mpfr_inits2 (prec, m0, m1, NULL);
13871 mpfr_from_real (m0, ra0, MPFR_RNDN);
13872 mpfr_from_real (m1, ra1, MPFR_RNDN);
13873 mpfr_clear_flags ();
13874 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13875 /* Remquo is independent of the rounding mode, so pass
13876 inexact=0 to do_mpfr_ckconv(). */
13877 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13878 mpfr_clears (m0, m1, NULL);
13879 if (result_rem)
13880 {
13881 /* MPFR calculates quo in the host's long so it may
13882 return more bits in quo than the target int can hold
13883 if sizeof(host long) > sizeof(target int). This can
13884 happen even for native compilers in LP64 mode. In
13885 these cases, reduce the quo value modulo
13886 2^(INT_TYPE_SIZE - 1) so that it fits in the target
13887 int with one bit left over for the sign. */
13888 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13889 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13890
13891 /* Dereference the quo pointer argument. */
13892 arg_quo = build_fold_indirect_ref (arg_quo);
13893 /* Proceed iff a valid pointer type was passed in. */
13894 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13895 {
13896 /* Set the value. */
13897 tree result_quo
13898 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13899 build_int_cst (TREE_TYPE (arg_quo),
13900 integer_quo));
13901 TREE_SIDE_EFFECTS (result_quo) = 1;
13902 /* Combine the quo assignment with the rem. */
13903 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13904 result_quo, result_rem));
13905 }
13906 }
13907 }
13908 }
13909 return result;
13910 }
13911
13912 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13913 resulting value as a tree with type TYPE. The mpfr precision is
13914 set to the precision of TYPE. We assume that this mpfr function
13915 returns zero if the result could be calculated exactly within the
13916 requested precision. In addition, the integer pointer represented
13917 by ARG_SG will be dereferenced and set to the appropriate signgam
13918 (-1,1) value. */
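/* Worked example (for illustration): for lgamma_r (-0.5, &sg) the
   gamma function value is -2*sqrt(pi), approximately -3.5449, so the
   folded result is log(3.5449...) which is about 1.2655, and *SG is
   set to -1.  */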
13919
13920 static tree
13921 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13922 {
13923 tree result = NULL_TREE;
13924
13925 STRIP_NOPS (arg);
13926
13927 /* To proceed, MPFR must exactly represent the target floating point
13928 format, which only happens when the target base equals two. Also
13929 verify ARG is a constant and that ARG_SG is an int pointer. */
13930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13931 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13932 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13933 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13934 {
13935 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13936
13937 /* In addition to NaN and Inf, the argument cannot be zero or a
13938 negative integer. */
13939 if (real_isfinite (ra)
13940 && ra->cl != rvc_zero
13941 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13942 {
13943 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13944 const int prec = fmt->p;
13945 const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
13946 int inexact, sg;
13947 mpfr_t m;
13948 tree result_lg;
13949
13950 mpfr_init2 (m, prec);
13951 mpfr_from_real (m, ra, MPFR_RNDN);
13952 mpfr_clear_flags ();
13953 inexact = mpfr_lgamma (m, &sg, m, rnd);
13954 result_lg = do_mpfr_ckconv (m, type, inexact);
13955 mpfr_clear (m);
13956 if (result_lg)
13957 {
13958 tree result_sg;
13959
13960 /* Dereference the arg_sg pointer argument. */
13961 arg_sg = build_fold_indirect_ref (arg_sg);
13962 /* Assign the signgam value into *arg_sg. */
13963 result_sg = fold_build2 (MODIFY_EXPR,
13964 TREE_TYPE (arg_sg), arg_sg,
13965 build_int_cst (TREE_TYPE (arg_sg), sg));
13966 TREE_SIDE_EFFECTS (result_sg) = 1;
13967 /* Combine the signgam assignment with the lgamma result. */
13968 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13969 result_sg, result_lg));
13970 }
13971 }
13972 }
13973
13974 return result;
13975 }
13976
13977 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13978 mpc function FUNC on it and return the resulting value as a tree
13979 with type TYPE. The mpfr precision is set to the precision of
13980 TYPE. We assume that function FUNC returns zero if the result
13981 could be calculated exactly within the requested precision. If
13982 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13983 in the arguments and/or results. */
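/* A sketch of a typical use (the do_nonfinite argument is chosen by
   the caller; mpc_pow is one example of a suitable FUNC):

     result = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   which folds a cpow-style call on two complex constants.  */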
13984
13985 tree
13986 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13987 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13988 {
13989 tree result = NULL_TREE;
13990
13991 STRIP_NOPS (arg0);
13992 STRIP_NOPS (arg1);
13993
13994 /* To proceed, MPFR must exactly represent the target floating point
13995 format, which only happens when the target base equals two. */
13996 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13998 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14000 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14001 {
14002 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14003 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14004 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14005 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14006
14007 if (do_nonfinite
14008 || (real_isfinite (re0) && real_isfinite (im0)
14009 && real_isfinite (re1) && real_isfinite (im1)))
14010 {
14011 const struct real_format *const fmt =
14012 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14013 const int prec = fmt->p;
14014 const mpfr_rnd_t rnd = fmt->round_towards_zero
14015 ? MPFR_RNDZ : MPFR_RNDN;
14016 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14017 int inexact;
14018 mpc_t m0, m1;
14019
14020 mpc_init2 (m0, prec);
14021 mpc_init2 (m1, prec);
14022 mpfr_from_real (mpc_realref (m0), re0, rnd);
14023 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14024 mpfr_from_real (mpc_realref (m1), re1, rnd);
14025 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14026 mpfr_clear_flags ();
14027 inexact = func (m0, m0, m1, crnd);
14028 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14029 mpc_clear (m0);
14030 mpc_clear (m1);
14031 }
14032 }
14033
14034 return result;
14035 }
14036
14037 /* A wrapper function for builtin folding that prevents warnings for
14038 "statement without effect" and the like, caused by removing the
14039 call node before the warning is generated. */
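/* A hypothetical usage sketch for a caller walking GIMPLE:

     if (gcall *call = dyn_cast <gcall *> (stmt))
       if (tree folded = fold_call_stmt (call, /*ignore=*/ false))
	 ...  use FOLDED in place of the call's value  ...  */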
14040
14041 tree
14042 fold_call_stmt (gcall *stmt, bool ignore)
14043 {
14044 tree ret = NULL_TREE;
14045 tree fndecl = gimple_call_fndecl (stmt);
14046 location_t loc = gimple_location (stmt);
14047 if (fndecl && fndecl_built_in_p (fndecl)
14048 && !gimple_call_va_arg_pack_p (stmt))
14049 {
14050 int nargs = gimple_call_num_args (stmt);
14051 tree *args = (nargs > 0
14052 ? gimple_call_arg_ptr (stmt, 0)
14053 : &error_mark_node);
14054
14055 if (avoid_folding_inline_builtin (fndecl))
14056 return NULL_TREE;
14057 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14058 {
14059 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14060 }
14061 else
14062 {
14063 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
14064 if (ret)
14065 {
14066 /* Propagate location information from original call to
14067 expansion of builtin. Otherwise things like
14068 maybe_emit_chk_warning, that operate on the expansion
14069 of a builtin, will use the wrong location information. */
14070 if (gimple_has_location (stmt))
14071 {
14072 tree realret = ret;
14073 if (TREE_CODE (ret) == NOP_EXPR)
14074 realret = TREE_OPERAND (ret, 0);
14075 if (CAN_HAVE_LOCATION_P (realret)
14076 && !EXPR_HAS_LOCATION (realret))
14077 SET_EXPR_LOCATION (realret, loc);
14078 return realret;
14079 }
14080 return ret;
14081 }
14082 }
14083 }
14084 return NULL_TREE;
14085 }
14086
14087 /* Look up the function in builtin_decl that corresponds to DECL
14088 and set ASMSPEC as its user assembler name. DECL must be a
14089 function decl that declares a builtin. */
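/* Illustrative example: a translation unit containing

     extern int ffs (int) __asm__ ("my_ffs");

   is expected to end up here and redirect the BUILT_IN_FFS expansion,
   and any libcall emitted for it, to the assembler name "my_ffs".  */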
14090
14091 void
14092 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14093 {
14094 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
14095 && asmspec != 0);
14096
14097 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14098 set_user_assembler_name (builtin, asmspec);
14099
14100 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
14101 && INT_TYPE_SIZE < BITS_PER_WORD)
14102 {
14103 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
14104 set_user_assembler_libfunc ("ffs", asmspec);
14105 set_optab_libfunc (ffs_optab, mode, "ffs");
14106 }
14107 }
14108
14109 /* Return true if DECL is a builtin that expands to a constant or similarly
14110 simple code. */
14111 bool
14112 is_simple_builtin (tree decl)
14113 {
14114 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
14115 switch (DECL_FUNCTION_CODE (decl))
14116 {
14117 /* Builtins that expand to constants. */
14118 case BUILT_IN_CONSTANT_P:
14119 case BUILT_IN_EXPECT:
14120 case BUILT_IN_OBJECT_SIZE:
14121 case BUILT_IN_UNREACHABLE:
14122 /* Simple register moves or loads from stack. */
14123 case BUILT_IN_ASSUME_ALIGNED:
14124 case BUILT_IN_RETURN_ADDRESS:
14125 case BUILT_IN_EXTRACT_RETURN_ADDR:
14126 case BUILT_IN_FROB_RETURN_ADDR:
14127 case BUILT_IN_RETURN:
14128 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14129 case BUILT_IN_FRAME_ADDRESS:
14130 case BUILT_IN_VA_END:
14131 case BUILT_IN_STACK_SAVE:
14132 case BUILT_IN_STACK_RESTORE:
14133 /* Exception state returns or moves registers around. */
14134 case BUILT_IN_EH_FILTER:
14135 case BUILT_IN_EH_POINTER:
14136 case BUILT_IN_EH_COPY_VALUES:
14137 return true;
14138
14139 default:
14140 return false;
14141 }
14142
14143 return false;
14144 }
14145
14146 /* Return true if DECL is a builtin that is not expensive, i.e., it is
14147 most probably expanded inline into reasonably simple code. This is a
14148 superset of is_simple_builtin. */
14149 bool
14150 is_inexpensive_builtin (tree decl)
14151 {
14152 if (!decl)
14153 return false;
14154 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14155 return true;
14156 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14157 switch (DECL_FUNCTION_CODE (decl))
14158 {
14159 case BUILT_IN_ABS:
14160 CASE_BUILT_IN_ALLOCA:
14161 case BUILT_IN_BSWAP16:
14162 case BUILT_IN_BSWAP32:
14163 case BUILT_IN_BSWAP64:
14164 case BUILT_IN_BSWAP128:
14165 case BUILT_IN_CLZ:
14166 case BUILT_IN_CLZIMAX:
14167 case BUILT_IN_CLZL:
14168 case BUILT_IN_CLZLL:
14169 case BUILT_IN_CTZ:
14170 case BUILT_IN_CTZIMAX:
14171 case BUILT_IN_CTZL:
14172 case BUILT_IN_CTZLL:
14173 case BUILT_IN_FFS:
14174 case BUILT_IN_FFSIMAX:
14175 case BUILT_IN_FFSL:
14176 case BUILT_IN_FFSLL:
14177 case BUILT_IN_IMAXABS:
14178 case BUILT_IN_FINITE:
14179 case BUILT_IN_FINITEF:
14180 case BUILT_IN_FINITEL:
14181 case BUILT_IN_FINITED32:
14182 case BUILT_IN_FINITED64:
14183 case BUILT_IN_FINITED128:
14184 case BUILT_IN_FPCLASSIFY:
14185 case BUILT_IN_ISFINITE:
14186 case BUILT_IN_ISINF_SIGN:
14187 case BUILT_IN_ISINF:
14188 case BUILT_IN_ISINFF:
14189 case BUILT_IN_ISINFL:
14190 case BUILT_IN_ISINFD32:
14191 case BUILT_IN_ISINFD64:
14192 case BUILT_IN_ISINFD128:
14193 case BUILT_IN_ISNAN:
14194 case BUILT_IN_ISNANF:
14195 case BUILT_IN_ISNANL:
14196 case BUILT_IN_ISNAND32:
14197 case BUILT_IN_ISNAND64:
14198 case BUILT_IN_ISNAND128:
14199 case BUILT_IN_ISNORMAL:
14200 case BUILT_IN_ISGREATER:
14201 case BUILT_IN_ISGREATEREQUAL:
14202 case BUILT_IN_ISLESS:
14203 case BUILT_IN_ISLESSEQUAL:
14204 case BUILT_IN_ISLESSGREATER:
14205 case BUILT_IN_ISUNORDERED:
14206 case BUILT_IN_VA_ARG_PACK:
14207 case BUILT_IN_VA_ARG_PACK_LEN:
14208 case BUILT_IN_VA_COPY:
14209 case BUILT_IN_TRAP:
14210 case BUILT_IN_SAVEREGS:
14211 case BUILT_IN_POPCOUNTL:
14212 case BUILT_IN_POPCOUNTLL:
14213 case BUILT_IN_POPCOUNTIMAX:
14214 case BUILT_IN_POPCOUNT:
14215 case BUILT_IN_PARITYL:
14216 case BUILT_IN_PARITYLL:
14217 case BUILT_IN_PARITYIMAX:
14218 case BUILT_IN_PARITY:
14219 case BUILT_IN_LABS:
14220 case BUILT_IN_LLABS:
14221 case BUILT_IN_PREFETCH:
14222 case BUILT_IN_ACC_ON_DEVICE:
14223 return true;
14224
14225 default:
14226 return is_simple_builtin (decl);
14227 }
14228
14229 return false;
14230 }
14231
14232 /* Return true if T is a constant and the value cast to a target char
14233 can be represented by a host char.
14234 Store the converted char constant in *P if so. */
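/* An illustrative sketch of how string-builtin folders typically use
   this:

     char c;
     if (!target_char_cst_p (arg1, &c))
       return false;
     ...  fold strchr/memchr using the host char C  ...  */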
14235
14236 bool
14237 target_char_cst_p (tree t, char *p)
14238 {
14239 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
14240 return false;
14241
14242 *p = (char)tree_to_uhwi (t);
14243 return true;
14244 }
14245
14246 /* Return true if the builtin DECL is implemented in a standard library.
14247 Otherwise return false, which does not guarantee that it is not (the list
14248 of handled builtins below may be incomplete). */
14249
14250 bool
14251 builtin_with_linkage_p (tree decl)
14252 {
14253 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14254 switch (DECL_FUNCTION_CODE (decl))
14255 {
14256 CASE_FLT_FN (BUILT_IN_ACOS):
14257 CASE_FLT_FN (BUILT_IN_ACOSH):
14258 CASE_FLT_FN (BUILT_IN_ASIN):
14259 CASE_FLT_FN (BUILT_IN_ASINH):
14260 CASE_FLT_FN (BUILT_IN_ATAN):
14261 CASE_FLT_FN (BUILT_IN_ATANH):
14262 CASE_FLT_FN (BUILT_IN_ATAN2):
14263 CASE_FLT_FN (BUILT_IN_CBRT):
14264 CASE_FLT_FN (BUILT_IN_CEIL):
14265 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
14266 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14267 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
14268 CASE_FLT_FN (BUILT_IN_COS):
14269 CASE_FLT_FN (BUILT_IN_COSH):
14270 CASE_FLT_FN (BUILT_IN_ERF):
14271 CASE_FLT_FN (BUILT_IN_ERFC):
14272 CASE_FLT_FN (BUILT_IN_EXP):
14273 CASE_FLT_FN (BUILT_IN_EXP2):
14274 CASE_FLT_FN (BUILT_IN_EXPM1):
14275 CASE_FLT_FN (BUILT_IN_FABS):
14276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
14277 CASE_FLT_FN (BUILT_IN_FDIM):
14278 CASE_FLT_FN (BUILT_IN_FLOOR):
14279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
14280 CASE_FLT_FN (BUILT_IN_FMA):
14281 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
14282 CASE_FLT_FN (BUILT_IN_FMAX):
14283 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
14284 CASE_FLT_FN (BUILT_IN_FMIN):
14285 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
14286 CASE_FLT_FN (BUILT_IN_FMOD):
14287 CASE_FLT_FN (BUILT_IN_FREXP):
14288 CASE_FLT_FN (BUILT_IN_HYPOT):
14289 CASE_FLT_FN (BUILT_IN_ILOGB):
14290 CASE_FLT_FN (BUILT_IN_LDEXP):
14291 CASE_FLT_FN (BUILT_IN_LGAMMA):
14292 CASE_FLT_FN (BUILT_IN_LLRINT):
14293 CASE_FLT_FN (BUILT_IN_LLROUND):
14294 CASE_FLT_FN (BUILT_IN_LOG):
14295 CASE_FLT_FN (BUILT_IN_LOG10):
14296 CASE_FLT_FN (BUILT_IN_LOG1P):
14297 CASE_FLT_FN (BUILT_IN_LOG2):
14298 CASE_FLT_FN (BUILT_IN_LOGB):
14299 CASE_FLT_FN (BUILT_IN_LRINT):
14300 CASE_FLT_FN (BUILT_IN_LROUND):
14301 CASE_FLT_FN (BUILT_IN_MODF):
14302 CASE_FLT_FN (BUILT_IN_NAN):
14303 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14304 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
14305 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
14306 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
14307 CASE_FLT_FN (BUILT_IN_POW):
14308 CASE_FLT_FN (BUILT_IN_REMAINDER):
14309 CASE_FLT_FN (BUILT_IN_REMQUO):
14310 CASE_FLT_FN (BUILT_IN_RINT):
14311 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
14312 CASE_FLT_FN (BUILT_IN_ROUND):
14313 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
14314 CASE_FLT_FN (BUILT_IN_SCALBLN):
14315 CASE_FLT_FN (BUILT_IN_SCALBN):
14316 CASE_FLT_FN (BUILT_IN_SIN):
14317 CASE_FLT_FN (BUILT_IN_SINH):
14318 CASE_FLT_FN (BUILT_IN_SINCOS):
14319 CASE_FLT_FN (BUILT_IN_SQRT):
14320 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
14321 CASE_FLT_FN (BUILT_IN_TAN):
14322 CASE_FLT_FN (BUILT_IN_TANH):
14323 CASE_FLT_FN (BUILT_IN_TGAMMA):
14324 CASE_FLT_FN (BUILT_IN_TRUNC):
14325 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
14326 return true;
14327 default:
14328 break;
14329 }
14330 return false;
14331 }
14332
14333 /* Return true if OFFRNG is bounded to a subrange of offset values
14334 valid for the largest possible object. */
14335
14336 bool
14337 access_ref::offset_bounded () const
14338 {
14339 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
14340 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
14341 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
14342 }
14343
14344 /* Return an attr_fnspec describing the known side effects of builtin CALLEE,
14345 or an empty spec if nothing is known. See tree-ssa-structalias.c:
14346 find_func_aliases for the list of builtins we might need to handle here. */
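/* A sketch of a typical use (the exact caller shown is illustrative):

     if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
       {
	 attr_fnspec fnspec = builtin_fnspec (fndecl);
	 ...  query the spec, e.g. whether an argument is only read  ...
       }

   See attr-fnspec.h for the string format; for instance, the "1cW 1 "
   spec returned for strcat below encodes that the call returns its
   first argument.  */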
14347
14348 attr_fnspec
14349 builtin_fnspec (tree callee)
14350 {
14351 built_in_function code = DECL_FUNCTION_CODE (callee);
14352
14353 switch (code)
14354 {
14355 /* All the following functions read memory pointed to by
14356 their second argument and write memory pointed to by first
14357 argument.
14358 strcat/strncat additionally reads memory pointed to by the first
14359 argument. */
14360 case BUILT_IN_STRCAT:
14361 case BUILT_IN_STRCAT_CHK:
14362 return "1cW 1 ";
14363 case BUILT_IN_STRNCAT:
14364 case BUILT_IN_STRNCAT_CHK:
14365 return "1cW 13";
14366 case BUILT_IN_STRCPY:
14367 case BUILT_IN_STRCPY_CHK:
14368 return "1cO 1 ";
14369 case BUILT_IN_STPCPY:
14370 case BUILT_IN_STPCPY_CHK:
14371 return ".cO 1 ";
14372 case BUILT_IN_STRNCPY:
14373 case BUILT_IN_MEMCPY:
14374 case BUILT_IN_MEMMOVE:
14375 case BUILT_IN_TM_MEMCPY:
14376 case BUILT_IN_TM_MEMMOVE:
14377 case BUILT_IN_STRNCPY_CHK:
14378 case BUILT_IN_MEMCPY_CHK:
14379 case BUILT_IN_MEMMOVE_CHK:
14380 return "1cO313";
14381 case BUILT_IN_MEMPCPY:
14382 case BUILT_IN_MEMPCPY_CHK:
14383 return ".cO313";
14384 case BUILT_IN_STPNCPY:
14385 case BUILT_IN_STPNCPY_CHK:
14386 return ".cO313";
14387 case BUILT_IN_BCOPY:
14388 return ".c23O3";
14389 case BUILT_IN_BZERO:
14390 return ".cO2";
14391 case BUILT_IN_MEMCMP:
14392 case BUILT_IN_MEMCMP_EQ:
14393 case BUILT_IN_BCMP:
14394 case BUILT_IN_STRNCMP:
14395 case BUILT_IN_STRNCMP_EQ:
14396 case BUILT_IN_STRNCASECMP:
14397 return ".cR3R3";
14398
14399 /* The following functions read memory pointed to by their
14400 first argument. */
14401 CASE_BUILT_IN_TM_LOAD (1):
14402 CASE_BUILT_IN_TM_LOAD (2):
14403 CASE_BUILT_IN_TM_LOAD (4):
14404 CASE_BUILT_IN_TM_LOAD (8):
14405 CASE_BUILT_IN_TM_LOAD (FLOAT):
14406 CASE_BUILT_IN_TM_LOAD (DOUBLE):
14407 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
14408 CASE_BUILT_IN_TM_LOAD (M64):
14409 CASE_BUILT_IN_TM_LOAD (M128):
14410 CASE_BUILT_IN_TM_LOAD (M256):
14411 case BUILT_IN_TM_LOG:
14412 case BUILT_IN_TM_LOG_1:
14413 case BUILT_IN_TM_LOG_2:
14414 case BUILT_IN_TM_LOG_4:
14415 case BUILT_IN_TM_LOG_8:
14416 case BUILT_IN_TM_LOG_FLOAT:
14417 case BUILT_IN_TM_LOG_DOUBLE:
14418 case BUILT_IN_TM_LOG_LDOUBLE:
14419 case BUILT_IN_TM_LOG_M64:
14420 case BUILT_IN_TM_LOG_M128:
14421 case BUILT_IN_TM_LOG_M256:
14422 return ".cR ";
14423
14424 case BUILT_IN_INDEX:
14425 case BUILT_IN_RINDEX:
14426 case BUILT_IN_STRCHR:
14427 case BUILT_IN_STRLEN:
14428 case BUILT_IN_STRRCHR:
14429 return ".cR ";
14430 case BUILT_IN_STRNLEN:
14431 return ".cR2";
14432
14433 /* These read memory pointed to by the first argument.
14434 Allocating memory does not have any side-effects apart from
14435 being the definition point for the pointer.
14436 Unix98 specifies that errno is set on allocation failure. */
14437 case BUILT_IN_STRDUP:
14438 return "mCR ";
14439 case BUILT_IN_STRNDUP:
14440 return "mCR2";
14441 /* Allocating memory does not have any side-effects apart from
14442 being the definition point for the pointer. */
14443 case BUILT_IN_MALLOC:
14444 case BUILT_IN_ALIGNED_ALLOC:
14445 case BUILT_IN_CALLOC:
14446 case BUILT_IN_GOMP_ALLOC:
14447 return "mC";
14448 CASE_BUILT_IN_ALLOCA:
14449 return "mc";
14450 /* These read memory pointed to by the first argument with size
14451 in the third argument. */
14452 case BUILT_IN_MEMCHR:
14453 return ".cR3";
14454 /* These read memory pointed to by the first and second arguments. */
14455 case BUILT_IN_STRSTR:
14456 case BUILT_IN_STRPBRK:
14457 case BUILT_IN_STRCASECMP:
14458 case BUILT_IN_STRCSPN:
14459 case BUILT_IN_STRSPN:
14460 case BUILT_IN_STRCMP:
14461 case BUILT_IN_STRCMP_EQ:
14462 return ".cR R ";
14463 /* Freeing memory kills the pointed-to memory. More importantly
14464 the call has to serve as a barrier for moving loads and stores
14465 across it. */
14466 case BUILT_IN_STACK_RESTORE:
14467 case BUILT_IN_FREE:
14468 case BUILT_IN_GOMP_FREE:
14469 return ".co ";
14470 case BUILT_IN_VA_END:
14471 return ".cO ";
14472 /* Realloc serves both as allocation point and deallocation point. */
14473 case BUILT_IN_REALLOC:
14474 return ".Cw ";
14475 case BUILT_IN_GAMMA_R:
14476 case BUILT_IN_GAMMAF_R:
14477 case BUILT_IN_GAMMAL_R:
14478 case BUILT_IN_LGAMMA_R:
14479 case BUILT_IN_LGAMMAF_R:
14480 case BUILT_IN_LGAMMAL_R:
14481 return ".C. Ot";
14482 case BUILT_IN_FREXP:
14483 case BUILT_IN_FREXPF:
14484 case BUILT_IN_FREXPL:
14485 case BUILT_IN_MODF:
14486 case BUILT_IN_MODFF:
14487 case BUILT_IN_MODFL:
14488 return ".c. Ot";
14489 case BUILT_IN_REMQUO:
14490 case BUILT_IN_REMQUOF:
14491 case BUILT_IN_REMQUOL:
14492 return ".c. . Ot";
14493 case BUILT_IN_SINCOS:
14494 case BUILT_IN_SINCOSF:
14495 case BUILT_IN_SINCOSL:
14496 return ".c. OtOt";
14497 case BUILT_IN_MEMSET:
14498 case BUILT_IN_MEMSET_CHK:
14499 case BUILT_IN_TM_MEMSET:
14500 return "1cO3";
14501 CASE_BUILT_IN_TM_STORE (1):
14502 CASE_BUILT_IN_TM_STORE (2):
14503 CASE_BUILT_IN_TM_STORE (4):
14504 CASE_BUILT_IN_TM_STORE (8):
14505 CASE_BUILT_IN_TM_STORE (FLOAT):
14506 CASE_BUILT_IN_TM_STORE (DOUBLE):
14507 CASE_BUILT_IN_TM_STORE (LDOUBLE):
14508 CASE_BUILT_IN_TM_STORE (M64):
14509 CASE_BUILT_IN_TM_STORE (M128):
14510 CASE_BUILT_IN_TM_STORE (M256):
14511 return ".cO ";
14512 case BUILT_IN_STACK_SAVE:
14513 return ".c";
14514 case BUILT_IN_ASSUME_ALIGNED:
14515 return "1cX ";
14516 /* But posix_memalign stores a pointer into the memory pointed to
14517 by its first argument. */
14518 case BUILT_IN_POSIX_MEMALIGN:
14519 return ".cOt";
14520
14521 default:
14522 return "";
14523 }
14524 }